import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation import GenerationConfig

MODEL_NAME = "X-D-Lab/MindChat-Qwen-7B-v2"

# The Qwen-based checkpoint ships its own tokenizer and model code, so trust_remote_code is required.
# model.chat() does not use padding, so no pad_token has to be set on the tokenizer.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)

# Load the model on GPU when available, otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, trust_remote_code=True).to(device).eval()
model.generation_config = GenerationConfig.from_pretrained(MODEL_NAME, trust_remote_code=True)


def chatbot(input_text, history=None):
    # model.chat() builds the chat prompt itself, so the raw user text is passed straight through;
    # it returns a (response, updated_history) tuple. Tokenizing the input manually and forwarding
    # an attention_mask is unnecessary here and would not match the prompt chat() constructs.
    response, history = model.chat(tokenizer, input_text, history=history)
    return response


# A plain text-in / text-out interface: gr.Interface does not persist conversation state
# between calls, so every request is handled as a single turn.
gr.Interface(fn=chatbot, inputs="text", outputs="text", title="MindChat-Qwen").launch()
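To run the demo outside of Spaces, save the script (for example as app.py) and start it with python app.py; Gradio serves the interface on http://localhost:7860 by default. Besides torch, transformers, and gradio, the Qwen remote code typically also needs tiktoken, einops, and transformers_stream_generator installed, so those packages are assumed to be listed in the Space's requirements.txt.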