Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -2,13 +2,22 @@ import os
|
|
2 |
import gradio as gr
|
3 |
from langchain.llms import HuggingFaceHub
|
4 |
|
5 |
-
model_repo = os.getenv('
|
6 |
-
|
|
|
7 |
Begin of the document:
|
8 |
{query}
|
9 |
End of the document[/INST]
|
10 |
{target} translated document:
|
11 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
12 |
"""
|
13 |
|
14 |
model_kwargs={
|
@@ -19,14 +28,22 @@ model_kwargs={
|
|
19 |
"stop" : ["</s>","<|endoftext|>","<|end|>"],
|
20 |
}
|
21 |
|
22 |
-
|
|
|
23 |
|
24 |
def translation(source, target, text):
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
30 |
return response
|
31 |
|
32 |
gr.Interface(translation, inputs=["text","text","text"], outputs="text").launch()
|
|
|
2 |
import gradio as gr
|
3 |
from langchain.llms import HuggingFaceHub
|
4 |
|
5 |
# Repo ids for the two candidate backends, supplied via Space secrets.
# NOTE(review): the original assigned both env vars to the SAME name, so the
# Llama repo id was clobbered immediately and both clients ended up on
# StarChat. Keep both values under distinct names; `model_repo` retains its
# original final (StarChat) value for any existing reader.
llama_model_repo = os.getenv('HF_MODEL_LLAMA_REPO')
starchat_model_repo = os.getenv('HF_MODEL_STARCHAT_REPO')
model_repo = starchat_model_repo

# Llama-2 chat prompt: [INST]<<SYS>>system<</SYS>> user [/INST].
# The original instruction read "texts in document into then you return" —
# the {target} placeholder after "into" was missing, so the model was never
# told the target language inside the system prompt. Restored here; it is
# filled by the same .replace("{target}", ...) pass as the trailing line.
llamma_template = """[INST]<<SYS>>I want you to act as document language translator. You do translation {source} texts in document into {target} then you return to me the translated document AND DO NOTHING ELSE.<</SYS>>
Begin of the document:
{query}
End of the document[/INST]
{target} translated document:
"""
|
14 |
# StarChat prompt format: <|system|>...<|end|> <|user|>...<|end|> <|assistant|>.
# Two fixes vs. the original:
#  - the system message was closed with the Llama token "<</SYS>>" instead of
#    StarChat's "<|end|>", which StarChat does not recognise as a stop for the
#    system turn;
#  - the missing {target} placeholder after "into" is restored (same defect as
#    in the Llama template).
starchat_template = """<|system|>I want you to act as document language translator. You do translation {source} texts in document into {target} then you return to me the translated document AND DO NOTHING ELSE.<|end|>
Begin of the document:
{query}
End of the document<|end|>
<|assistant|>
{target} translated document:
"""
|
22 |
|
23 |
model_kwargs={
|
|
|
28 |
"stop" : ["</s>","<|endoftext|>","<|end|>"],
|
29 |
}
|
30 |
|
31 |
# One HuggingFaceHub client per backend. The original passed `model_repo` to
# BOTH constructors, and `model_repo` held only the last-assigned env var
# (StarChat) — so the "Llama" client silently pointed at the wrong model.
# Read each repo id from its own env var so each client is self-contained.
llm1 = HuggingFaceHub(repo_id=os.getenv('HF_MODEL_LLAMA_REPO'), task="text-generation", model_kwargs=model_kwargs)
llm2 = HuggingFaceHub(repo_id=os.getenv('HF_MODEL_STARCHAT_REPO'), task="text-generation", model_kwargs=model_kwargs)
|
33 |
|
34 |
def translation(source, target, text):
    """Translate a document with the Llama backend, falling back to StarChat.

    Parameters
    ----------
    source : str  -- source language name, substituted for {source}.
    target : str  -- target language name, substituted for {target}.
    text : str    -- the document body, substituted for {query}.

    Returns the model's translated document, or `text` unchanged if every
    backend call fails (so the Gradio handler never raises to the UI).
    """
    response = text  # safe default: echo the input if all backends fail
    try:
        # Placeholders are filled with str.replace, not str.format, because
        # the document text may itself contain braces.
        prompt = (llamma_template
                  .replace("{source}", source)
                  .replace("{target}", target)
                  .replace("{query}", text))
        response = llm1(prompt)
    except Exception as e:
        print(f"ERROR: LLM show {e}")
        # Fallback backend. The original let a second failure propagate and
        # crash the request; catch it so the default response is returned.
        try:
            prompt = (starchat_template
                      .replace("{source}", source)
                      .replace("{target}", target)
                      .replace("{query}", text))
            response = llm2(prompt)
        except Exception as e2:
            print(f"ERROR: LLM show {e2}")
    return response
|
48 |
|
49 |
# Wire the translator into a simple three-input text UI and start serving.
demo = gr.Interface(translation, inputs=["text", "text", "text"], outputs="text")
demo.launch()
|