How to use amitha/mllava-llama2-en-zh with Transformers:
# Option 1: use a pipeline as a high-level helper.
from transformers import pipeline

pipe = pipeline(
    "visual-question-answering",
    model="amitha/mllava-llama2-en-zh",
    trust_remote_code=True,  # the checkpoint ships custom modeling code on the Hub
)

# Option 2: load the model directly.
from transformers import AutoModelForVisualQuestionAnswering

model = AutoModelForVisualQuestionAnswering.from_pretrained(
    "amitha/mllava-llama2-en-zh",
    trust_remote_code=True,
    dtype="auto",  # infer the weight dtype from the checkpoint's config
)