# Hugging Face Space app.py (author: futranbg, commit d3cb070, 1.18 kB)
import os
import gradio as gr
from langchain.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
# Repo id of the hosted model (e.g. a Llama-2 chat checkpoint); supplied
# through the HF_MODEL_REPO environment variable. None if unset — TODO
# confirm the Space always defines it.
model_repo = os.getenv('HF_MODEL_REPO')

# End-of-sequence marker used to trim the raw generation output.
eos_string = "</s>"

# Llama-2 chat prompt template.
# FIX: the system block must be closed with "<</SYS>>" (with a slash);
# the original closed it with a second "<<SYS>>", which Llama-2-format
# models do not treat as the end of the system prompt.
template = """<s>[INST]<<SYS>>You work as translator. You job is translate user requests from {source} to {target}<</SYS>>
{query}[/INST]</s>\n"""
prompt = PromptTemplate(template=template, input_variables=["source", "target", "query"])

# Generation settings forwarded to the Hugging Face Inference API.
model_kwargs = {
    "max_new_tokens": 2048,
    "temperature": 0.5,
    # Extra stop sequences in case the served model uses a different EOS token.
    "stop": ["</s>", "<|endoftext|>", "<|end|>"],
}
llm = HuggingFaceHub(repo_id=model_repo, task="text-generation", model_kwargs=model_kwargs)
chain = LLMChain(prompt=prompt, llm=llm)
def translation(source, target, text):
    """Translate *text* from the *source* language to the *target* language.

    Runs the LLM chain and returns the generation truncated at the first
    EOS marker (everything from ``eos_string`` onward is dropped).
    """
    # BUG FIX: the original called chain.run(question), but `question` is
    # undefined anywhere in the file, so every invocation raised NameError.
    # The prompt declares three input variables, so all must be supplied.
    response = chain.run(source=source, target=target, query=text)
    # Keep only the text before the first EOS marker.
    return response.partition(eos_string)[0]
# Gradio UI: source/target language pickers plus a text box, wired to
# the translation() callback.
# FIX: the `gr.inputs.*` namespace and the `default=` keyword were
# deprecated in Gradio 3.0 and removed in 4.x; the top-level components
# with `value=` are the supported equivalents.
inputs = [
    gr.Dropdown(['English', 'Vietnamese'], value='English', label='Source'),
    gr.Dropdown(['English', 'Vietnamese'], value='Vietnamese', label='Target'),
    gr.Textbox(lines=5, label="Input text"),
]
gr.Interface(translation, inputs=inputs, outputs="text").launch()