leolaish committed (verified)
Commit 35beead · 1 Parent(s): 71fa9cc

Update app1.py

Files changed (1)
  1. app1.py +27 -29
app1.py CHANGED
@@ -3,34 +3,32 @@ from huggingface_hub import InferenceClient
 
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
-def respond(message, history, system_message, max_tokens, temperature, top_p):
-    messages = [{"role": "system", "content": system_message}]
-    for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
-    messages.append({"role": "user", "content": message})
-    response = ""
-    for message in client.chat_completion(messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p):
-        token = message.choices[0].delta.content
-        response += token
-        yield response
+with gr.Blocks() as demo:
+    system_message = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
+    max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
+    temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
+    top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top p")
+    chat_history = gr.Markdown(label="Chat History")
+    message = gr.Textbox(label="Message")
+    chatbot = gr.Chatbot()
+    clear = gr.Button("Clear")
 
-def clear_history():
-    return [], ""
+    def respond(system_message, message, max_tokens, temperature, top_p, chat_history):
+        messages = [{"role": "system", "content": system_message}]
+        for val in chat_history:
+            if val[0]:
+                messages.append({"role": "user", "content": val[0]})
+            if val[1]:
+                messages.append({"role": "assistant", "content": val[1]})
+        messages.append({"role": "user", "content": message})
+        response = ""
+        for message in client.chat_completion(messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p):
+            token = message.choices[0].delta.content
+            response += token
+        chat_history.append((message, response))
+        return chat_history
 
-demo = gr.Interface(
-    respond,
-    title="MediPro",
-    inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="MediPro"),
-    ],
-    outputs=gr.Markdown(label="Chat History"),
-    layout="chat",  # Use the "chat" layout
-)
-if __name__ == "__main__":
-    demo.launch()
+    message.submit(respond, inputs=[system_message, message, max_tokens, temperature, top_p, chat_history], outputs=[chat_history, chatbot])
+    clear.click(lambda: [], outputs=[message, chatbot, chat_history])
+
+demo.launch()
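
For reference, a minimal sketch of how the same streaming chat_completion call could be wired to a gr.Chatbot inside gr.Blocks. This is not part of the commit: it assumes import gradio as gr sits near the top of app1.py, Gradio's tuple-style Chatbot history, and generator event handlers; the names chunk, user_turn, and assistant_turn are illustrative.

import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

with gr.Blocks() as demo:
    system_message = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
    max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
    temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
    top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top p")
    chatbot = gr.Chatbot(label="Chat History")
    message = gr.Textbox(label="Message")
    clear = gr.Button("Clear")

    def respond(system_message, message, max_tokens, temperature, top_p, history):
        # Build the prompt from the system message plus prior (user, assistant) turns.
        messages = [{"role": "system", "content": system_message}]
        for user_turn, assistant_turn in history:
            if user_turn:
                messages.append({"role": "user", "content": user_turn})
            if assistant_turn:
                messages.append({"role": "assistant", "content": assistant_turn})
        messages.append({"role": "user", "content": message})

        # Stream tokens and yield the growing history so the Chatbot updates live.
        response = ""
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            if token:
                response += token
            yield history + [(message, response)]

    message.submit(
        respond,
        inputs=[system_message, message, max_tokens, temperature, top_p, chatbot],
        outputs=chatbot,
    )
    # Reset the textbox and the chat history together.
    clear.click(lambda: ("", []), outputs=[message, chatbot])

if __name__ == "__main__":
    demo.launch()

Yielding the full history on every token keeps the handler a simple generator: each yield re-renders the Chatbot with the partial assistant reply appended to the prior turns.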