import json

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "winninghealth/WiNGPT-Babel"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)

def translate(text):
    # The system prompt "中英互译下面的内容" ("translate the content below between
    # Chinese and English") is the instruction format the model expects.
    prompt = f"<|im_start|>system\n中英互译下面的内容<|im_end|>\n<|im_start|>user\n{text}<|im_end|>\n<|im_start|>assistant\n"
    inputs = tokenizer([prompt], return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=512, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
    # Decode only the newly generated tokens so the prompt is not echoed back.
    generated_tokens = outputs[0][inputs["input_ids"].shape[1]:]
    translated_text = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return translated_text

def custom_api(text_list, source_lang, target_lang):
    # The textbox delivers a JSON-encoded list of strings, e.g. '["Hello", "World"]'.
    if isinstance(text_list, str):
        text_list = json.loads(text_list)
    # The model only supports Chinese-English translation.
    if (source_lang, target_lang) in [("zh-CN", "en"), ("en", "zh-CN")]:
        translated_list = [translate(text) for text in text_list]
    else:
        return {"error": "Unsupported language pair"}
    return {
        "translations": [
            {"detected_source_lang": source_lang, "text": translated_text}
            for translated_text in translated_list
        ]
    }

# Build the Gradio interface
iface = gr.Interface(
    fn=custom_api,
    inputs=[
        gr.Textbox(lines=5, label="Input text list (Chinese-English translation)", placeholder='["Hello", "World"]'),
        gr.Textbox(label="Source language", placeholder="zh-CN"),
        gr.Textbox(label="Target language", placeholder="en"),
    ],
    outputs=gr.JSON(label="Translation result"),
    title="WiNGPT-Babel Translation Demo",
    description="A translation demo based on the WiNGPT-Babel model. Supports Chinese-English translation.",
)

iface.launch()
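
Once the app is running, the same endpoint can also be queried programmatically. The following is a minimal sketch using the gradio_client package, run from a separate script; the Space ID "your-username/WiNGPT-Babel-demo" is a placeholder and should be replaced with the actual Space name (or the local URL printed by launch()).

# Separate client script (assumes the Space ID below is replaced with the real one).
from gradio_client import Client

client = Client("your-username/WiNGPT-Babel-demo")  # hypothetical Space ID
result = client.predict(
    '["Hello", "World"]',  # JSON-encoded list of texts
    "en",                  # source language
    "zh-CN",               # target language
    api_name="/predict",
)
print(result)  # e.g. {"translations": [{"detected_source_lang": "en", "text": "..."}, ...]}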