YangWu001 committed on
Commit d9961a5 · 1 Parent(s): 779c082
Files changed (1)
  1. app.py +27 -30
app.py CHANGED
@@ -1,13 +1,11 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import time
-import threading
 
 # Inference client setup
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
-# Global flag to determine the mode and to handle cancellation
-use_local = False
+# Global flag to handle cancellation
 stop_inference = False
 
 def respond(
@@ -19,11 +17,10 @@ def respond(
     top_p,
     use_local_model,
 ):
-    global use_local, stop_inference
-    use_local = use_local_model
+    global stop_inference
     stop_inference = False  # Reset cancellation flag
 
-    if use_local:
+    if use_local_model:
         # Simulate local inference
         time.sleep(2)  # simulate a delay
         response = "This is a response from the local model."
@@ -56,7 +53,6 @@ def respond(
 def cancel_inference():
     global stop_inference
     stop_inference = True
-    return gr.update(label="Inference cancelled.")
 
 # Custom CSS for a fancy look
 custom_css = """
@@ -105,29 +101,30 @@ custom_css = """
 """
 
 # Define the interface
-demo = gr.ChatInterface(
-    respond,
-    additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message", interactive=True),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(
-            minimum=0.1,
-            maximum=1.0,
-            value=0.95,
-            step=0.05,
-            label="Top-p (nucleus sampling)",
-        ),
-        gr.Checkbox(label="Use Local Model", value=False),
-        gr.Button("Cancel Inference"),
-    ],
-    css=custom_css,
-    title="🌟 Fancy AI Chatbot 🌟",
-    description="Interact with the AI chatbot using customizable settings below."
-)
-
-cancel_button = demo.add_button("Cancel Inference", variant="danger", elem_id="cancel_button")
-cancel_button.click(cancel_inference, None, None)
+with gr.Blocks(css=custom_css) as demo:
+    gr.Markdown("<h1 style='text-align: center;'>🌟 Fancy AI Chatbot 🌟</h1>")
+    gr.Markdown("Interact with the AI chatbot using customizable settings below.")
+
+    with gr.Row():
+        system_message = gr.Textbox(value="You are a friendly Chatbot.", label="System message", interactive=True)
+        use_local_model = gr.Checkbox(label="Use Local Model", value=False)
+
+    with gr.Row():
+        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
+        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
+        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
+
+    chat_history = gr.Chatbot(label="Chat")
+
+    user_input = gr.Textbox(show_label=False, placeholder="Type your message here...")
+
+    cancel_button = gr.Button("Cancel Inference", variant="danger")
+
+    def chat_fn(message, history):
+        return respond(message, history, system_message.value, max_tokens.value, temperature.value, top_p.value, use_local_model.value)
+
+    user_input.submit(chat_fn, [user_input, chat_history], chat_history)
+    cancel_button.click(cancel_inference)
 
 if __name__ == "__main__":
     demo.launch()
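
The hunks above never show the part of respond() where stop_inference is actually consulted, so the following is only a hypothetical sketch (not code from this commit) of how a cancellable streaming loop over InferenceClient.chat_completion is typically written, using the client defined at the top of app.py and checking the flag between streamed chunks:

# Hypothetical sketch only; the real respond() body is not shown in this diff.
def respond(message, history, system_message, max_tokens, temperature, top_p, use_local_model):
    global stop_inference
    stop_inference = False  # Reset cancellation flag

    # Rebuild the transcript expected by the chat-completion endpoint.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history or []:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
    ):
        if stop_inference:  # flipped to True by cancel_inference() via the UI button
            break
        response += chunk.choices[0].delta.content or ""
        yield response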
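One aside on the new Blocks wiring: chat_fn reads system_message.value, max_tokens.value, and so on, which in Gradio returns each component's initial value rather than its current UI state. A minimal sketch of the more conventional wiring, assuming the same respond signature and placed inside the same with gr.Blocks(...) block, passes the components through the event's inputs so the handler receives their live values (only the input wiring changes; the output stays as in the commit):

# Sketch only, not part of this commit: let Gradio supply the live component values.
def chat_fn(message, history, system_message, max_tokens, temperature, top_p, use_local_model):
    return respond(message, history, system_message, max_tokens, temperature, top_p, use_local_model)

user_input.submit(
    chat_fn,
    inputs=[user_input, chat_history, system_message, max_tokens, temperature, top_p, use_local_model],
    outputs=chat_history,
)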