gradio app
README.md
CHANGED
@@ -1,8 +1,8 @@
 ---
 title: OpenChat 3.2
 emoji: 🏆
-colorFrom:
-colorTo:
+colorFrom: blue
+colorTo: indigo
 sdk: gradio
 sdk_version: 3.39.0
 app_file: app.py
app.py
ADDED
@@ -0,0 +1,89 @@
import os
import gradio as gr
import openai


# The model is served behind an OpenAI-compatible API; endpoint, key, and
# model name come from the Space's environment (secrets).
openai.api_base = os.environ.get("OPENAI_API_BASE")
openai.api_key = os.environ.get("OPENAI_API_KEY")
MODEL_TYPE = os.environ.get("MODEL_TYPE")


def make_prediction(history, max_tokens=None, temperature=None, top_p=None):
    # Convert Gradio chat history ([[user, bot], ...]) into OpenAI-style
    # messages, skipping the still-empty bot reply of the newest turn.
    messages = []
    for idx, (user, bot) in enumerate(history):
        messages.append({"role": "user", "content": user})
        if idx != len(history) - 1:
            messages.append({"role": "assistant", "content": bot})

    # print(messages)

    # Stream the completion and yield each text delta as it arrives.
    completion = openai.ChatCompletion.create(model=MODEL_TYPE, messages=messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p, stream=True)
    for chunk in completion:
        content = chunk["choices"][0]["delta"].get("content", "")
        if content:
            yield content


def clear_chat(chat_history_state, chat_message):
    # Reset the stored history and empty the input box.
    chat_history_state = []
    chat_message = ''
    return chat_history_state, chat_message


def user(message, history):
    history = history or []
    # Append the user's message to the conversation history
    history.append([message, ""])
    return "", history


def chat(history, max_tokens, temperature, top_p):
    history = history or []

    prediction = make_prediction(
        history,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p
    )

    # Accumulate deltas into the newest bot turn and push each partial
    # history to the UI as it grows.
    for delta_text in prediction:
        history[-1][1] += delta_text
        # stream the response
        yield history, history, ""


start_message = ""

with gr.Blocks() as demo:
    with gr.Tab("Chatbot"):
        gr.Markdown("# 💬 OpenChat 3.2 Playground 💬 ")
        chatbot = gr.Chatbot().style(height=500)
        with gr.Row():
            message = gr.Textbox(
                label="What do you want to chat about?",
                placeholder="Ask me anything.",
                lines=3,
            )
        with gr.Row():
            submit = gr.Button(value="Send message", variant="secondary").style(full_width=True)
            clear = gr.Button(value="New topic", variant="secondary").style(full_width=False)
            stop = gr.Button(value="Stop", variant="secondary").style(full_width=False)
        with gr.Row():
            with gr.Column():
                max_tokens = gr.Slider(32, 1024, label="Max Tokens", step=20, value=768)
                temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.7)
                top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=1.0)

    chat_history_state = gr.State()
    clear.click(clear_chat, inputs=[chat_history_state, message], outputs=[chat_history_state, message], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

    submit_click_event = submit.click(
        fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=True
    ).then(
        fn=chat, inputs=[chat_history_state, max_tokens, temperature, top_p], outputs=[chat_history_state, chatbot, message], queue=True
    )
    # "Stop" cancels the in-flight submit/stream chain.
    stop.click(fn=None, inputs=None, outputs=None, cancels=[submit_click_event], queue=False)

demo.queue(max_size=128, concurrency_count=48).launch(debug=True, server_name="0.0.0.0", server_port=7860)
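
For a quick sanity check of the backend outside Gradio, a one-off, non-streaming request against the same OpenAI-compatible API works. A minimal sketch, assuming the pre-1.0 openai client used above and the same OPENAI_API_BASE, OPENAI_API_KEY, and MODEL_TYPE environment variables (the prompt string is just an illustration):

import os
import openai

# Same configuration as app.py; all three variables must be set.
openai.api_base = os.environ.get("OPENAI_API_BASE")
openai.api_key = os.environ.get("OPENAI_API_KEY")

# Non-streaming request: the full reply comes back in one response object,
# under choices[0]["message"] rather than the streaming "delta" field.
response = openai.ChatCompletion.create(
    model=os.environ.get("MODEL_TYPE"),
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    max_tokens=32,
)
print(response["choices"][0]["message"]["content"])

If this prints a reply, the Space's app.py should stream from the same endpoint without further configuration.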