# test-llama2 / app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# gr.load("models/meta-llama/Llama-2-7b").launch()

# Load the instruction-tuned Mistral model and its tokenizer at module level
# (the original assigned to self.*, but there is no enclosing class here).
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
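# The script imports gradio but never builds or launches an interface. Below is a
# minimal sketch of how the loaded model could be exposed as a text-generation
# demo, assuming that is the Space's intent; the generate() function, the
# max_new_tokens value, and the Interface wiring are illustrative additions,
# not part of the original file.
def generate(prompt: str) -> str:
    # Tokenize the prompt, generate a continuation, and decode it back to text.
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()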