committed by test
Commit · 4a18c89
1 Parent(s): a5ccdb2

use chatgpt obj instead
app.py
CHANGED
@@ -1,11 +1,12 @@
 import os
 import time
 import uuid
-from typing import Dict, List, Tuple
+from typing import Dict, List, Optional, Tuple
 
 import gradio as gr
 import requests
 
+from chat import ChatGpt
 from store import store_message_pair
 
 # Environment Variables
@@ -24,7 +25,9 @@ CHATBOT_HISTORY = List[CHATBOT_MSG]
 
 # Constants
 LANG_BO = "bo"
-
+LANG_MEDIUM = "en"
+
+chatbot: Optional[ChatGpt] = None
 
 
 def bing_translate(text: str, from_lang: str, to_lang: str):
@@ -51,39 +54,22 @@ def bing_translate(text: str, from_lang: str, to_lang: str):
         raise Exception("Error in translation API: ", result)
 
 
-def make_completion(history):
-    if DEBUG:
-        time.sleep(2)
-        return "aaaaa"
-    headers = {
-        "Content-Type": "application/json",
-        "Authorization": f"Bearer {OPENAI_API_KEY}",
-    }
-    resp = requests.post(
-        url="https://api.openai.com/v1/chat/completions",
-        json={"model": "gpt-3.5-turbo", "messages": history},
-        headers=headers,
-    )
-    if resp.status_code == 200:
-        return resp.json()["choices"][0]["message"]["content"]
-    else:
-        print(resp.content)
-        return "Sorry, I don't understand."
-
-
 def user(input_bo: str, history_bo: list):
     history_bo.append([input_bo, None])
     return "", history_bo
 
 
-def store_chat(
-
-
-
-
+def store_chat(
+    chat_id: str,
+    msg_pair_bo: Tuple[str, str],
+    msg_pair_medium: Tuple[str, str],
+    medium_lang: str,
+):
+    store_message_pair(chat_id, msg_pair_bo, LANG_BO)
+    store_message_pair(chat_id, msg_pair_medium, medium_lang)
 
 
-def bot(history_bo: list, history_en: list, request: gr.Request):
+def bot(history_bo: list, request: gr.Request):
     """Translate user input to English, send to OpenAI, translate response to Tibetan, and return to user.
 
     Args:
@@ -95,12 +81,13 @@ def bot(history_bo: list, history_en: list, request: gr.Request):
         history_bo (CHATBOT_HISTORY): Tibetan history of gradio chatbot
         history_en (CHATGPT_HISTORY): English history of OpenAI ChatGPT
     """
+    global chatbot
+    if len(history_bo) <= 1:
+        chatbot = ChatGpt(OPENAI_API_KEY)
     input_bo = history_bo[-1][0]
-
-
-
-    resopnse_bo = bing_translate(response_en, LANG_ZH, LANG_BO)
-    history_en.append({"role": ROLE_ASSISTANT, "content": response_en})
+    input_ = bing_translate(input_bo, LANG_BO, LANG_MEDIUM)
+    response = chatbot.generate_response(input_)
+    resopnse_bo = bing_translate(response, LANG_MEDIUM, LANG_BO)
     history_bo[-1][1] = resopnse_bo
     if VERBOSE:
         print("------------------------")
@@ -109,9 +96,12 @@ def bot(history_bo: list, history_en: list, request: gr.Request):
         print("------------------------")
 
     store_chat(
-        chat_id=request.client.host,
+        chat_id=request.client.host,
+        msg_pair_bo=(input_bo, resopnse_bo),
+        msg_pair_medium=(input_, response),
+        medium_lang=LANG_MEDIUM,
     )
-    return history_bo
+    return history_bo
 
 
 with gr.Blocks() as demo:
@@ -127,11 +117,11 @@ with gr.Blocks() as demo:
         queue=False,
     ).then(
         fn=bot,
-        inputs=[history_bo
-        outputs=[history_bo
+        inputs=[history_bo],
+        outputs=[history_bo],
     )
 
     clear = gr.Button("New Chat")
-    clear.click(lambda:
+    clear.click(lambda: ("", []), None, [input_bo, history_bo], queue=False)
 
 demo.launch()
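For readers skimming the diff: bot() drops the removed make_completion() HTTP call and instead keeps one ChatGpt object per conversation, relaying each turn through a medium language. The following is a minimal sketch of that relay outside of Gradio, assuming the ChatGpt class from chat.py; the translate callable is a stand-in for app.py's bing_translate (same (text, from_lang, to_lang) signature), and the commented wiring at the end is illustrative only.

from typing import Callable

from chat import ChatGpt  # the class reworked in this commit

LANG_BO = "bo"      # Tibetan, the user-facing language
LANG_MEDIUM = "en"  # relay language sent to the model


def relay_turn(
    chatbot: ChatGpt,
    translate: Callable[[str, str, str], str],
    input_bo: str,
) -> str:
    """One chat turn: Tibetan -> medium language -> model -> back to Tibetan."""
    input_medium = translate(input_bo, LANG_BO, LANG_MEDIUM)    # bo -> en
    response_medium = chatbot.generate_response(input_medium)   # ChatGpt keeps the history
    return translate(response_medium, LANG_MEDIUM, LANG_BO)     # en -> bo


# Illustrative wiring only: OPENAI_API_KEY and bing_translate come from app.py.
# chatbot = ChatGpt(OPENAI_API_KEY)
# reply_bo = relay_turn(chatbot, bing_translate, input_bo)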
chat.py
CHANGED
@@ -3,9 +3,6 @@ from typing import Tuple
 
 import openai
 
-USER_MSG = str
-BOT_MSG = str
-
 
 class ChatGpt:
     def __init__(self, api_key, max_tokens=4096):
@@ -17,19 +14,19 @@ class ChatGpt:
         # Set up the OpenAI API client
         openai.api_key = self.api_key
 
-    def add_message(self, role, content):
+    def add_message(self, role: str, content: str):
         self.message_history.append({"role": role, "content": content})
         self._truncate_history()
 
-    def add_system_message(self, content):
+    def add_system_message(self, content: str):
         self.add_message("system", content)
 
-    def generate_response(self, user_input) ->
+    def generate_response(self, user_input: str) -> str:
         self.add_message("user", user_input)
         response = self._call_openai_api(self.message_history)
         self.add_message("assistant", response)
 
-        return
+        return response
 
     def _truncate_history(self):
         while self.total_tokens > self.max_tokens:
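As a usage note, the annotated ChatGpt interface after this commit can be exercised on its own roughly as below. This is a sketch under two assumptions: OPENAI_API_KEY is set in the environment, and _call_openai_api (unchanged in this diff) performs the actual chat-completion request.

import os

from chat import ChatGpt

# max_tokens bounds how much history _truncate_history keeps around.
bot = ChatGpt(api_key=os.environ["OPENAI_API_KEY"], max_tokens=4096)

# Optional system prompt before the first user turn.
bot.add_system_message("You are a concise assistant.")

# generate_response() appends the user turn, calls the API, records the
# assistant turn in message_history, and now returns the reply text itself.
reply: str = bot.generate_response("Hello!")
print(reply)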