ruslanmv committed on
Commit
f70fc29
·
verified ·
1 Parent(s): a26f5ee

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +91 -52
app.py CHANGED
@@ -1,69 +1,108 @@
1
  import gradio as gr
2
 
 
 
def load_model(model_name):
    """Return a stub inference callable for *model_name*.

    The callable simply echoes its input prefixed with the model name; it
    stands in for real model inference in this demo.
    """
    def respond(input_text):
        return f"Response from {model_name}: {input_text}"

    return respond
# Instantiate the stub model callables once at import time so the UI
# handlers below can reuse them.
deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model("DeepSeek-R1")
deepseek_r1_zero = load_model("DeepSeek-R1-Zero")
def create_optional_parameters():
    """Build the collapsed "Optional Parameters" accordion.

    Returns the (system_message, max_new_tokens, temperature, top_p)
    components so the caller can wire them into event handlers.
    """
    with gr.Accordion("Optional Parameters", open=False):
        system_message = gr.Textbox(
            lines=2,
            value="You are a friendly Chatbot created by ruslanmv.com",
            label="System Message",
        )
        max_new_tokens = gr.Slider(label="Max New Tokens", minimum=1, maximum=4000, value=200)
        temperature = gr.Slider(label="Temperature", minimum=0.1, maximum=4.0, value=0.7)
        top_p = gr.Slider(label="Top-p (nucleus sampling)", minimum=0.1, maximum=1.0, value=0.9)
    return system_message, max_new_tokens, temperature, top_p
def chat_interface(user_input, system_message, max_new_tokens, temperature, top_p):
    """Format a placeholder markdown reply echoing the input and settings.

    No model is actually invoked; the returned markdown just restates the
    message and the sampling parameters.
    """
    return f"""**System Message**: {system_message}
**Your Input**: {user_input}
**Parameters Used**:
- Max New Tokens: {max_new_tokens}
- Temperature: {temperature}
- Top-p: {top_p}
*Note: Actual model integration required for real responses*"""
# --- Single-turn demo interface ---
with gr.Blocks(css="""
.chat-container { max-width: 700px; margin: auto; }
.chat-input { margin-top: 20px; }
.chat-output { margin-top: 10px; padding: 10px; border: 1px solid #ccc; border-radius: 10px; background-color: #f9f9f9; }
""") as demo:
    # Header banner.
    with gr.Row(variant="panel"):
        gr.Markdown(
            """# DeepSeek Chatbot
Created by [ruslanmv.com](https://ruslanmv.com/)
A friendly chatbot interface. Start a conversation below!
""",
            elem_id="header",
        )

    # Left column: input + submit + optional parameters; right column: output.
    with gr.Row(elem_classes="chat-container"):
        with gr.Column():
            user_input = gr.Textbox(
                lines=3,
                placeholder="Type your message here...",
                label="Your Message",
                elem_classes="chat-input",
            )
            submit_button = gr.Button("Submit", variant="primary")
            system_message, max_new_tokens, temperature, top_p = create_optional_parameters()
        with gr.Column():
            output = gr.Markdown(label="Chatbot Response", elem_classes="chat-output")

    # One handler: format the placeholder response into the markdown pane.
    submit_button.click(
        chat_interface,
        inputs=[user_input, system_message, max_new_tokens, temperature, top_p],
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()
 
1
  import gradio as gr
2
 
# Placeholder for model loading (adjust as needed for your specific models).
# In a real application this would load actual model weights.
def load_model(model_name):
    """Return a stub chat callable for *model_name*.

    Each callable takes (input_text, history) and echoes the prompt with a
    model-specific prefix; unknown names get a generic default.
    """
    print(f"Loading {model_name}...")  # Indicate model loading
    # Map each known model to its canned response prefix (replace with
    # actual model logic).
    prefixes = {
        "DeepSeek-R1-Distill-Qwen-32B": "Distilled Model Response to: ",
        "DeepSeek-R1": "Base Model Response to: ",
        "DeepSeek-R1-Zero": "Zero Model Response to: ",
    }
    prefix = prefixes.get(model_name, "Default Response to: ")

    def respond(input_text, history):
        return f"{prefix}{input_text}"

    return respond
# Load the models (placeholder functions here). Each call also prints a
# "Loading ..." line as a side effect, in this order.
deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model("DeepSeek-R1")
deepseek_r1_zero = load_model("DeepSeek-R1-Zero")
# --- Chatbot function ---
def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
    """Run one chat turn through the selected stub model and extend history.

    Returns (history, history, "") so the caller can refresh the Chatbot
    display, persist the State, and clear the input textbox in one shot.
    """
    if not history:
        history = []
    print(f"Input: {input_text}, History: {history}, Model: {model_choice}")

    # Resolve the user's model selection; unrecognized choices get a
    # gentle reminder instead of a model response.
    dispatch = {
        "DeepSeek-R1-Distill-Qwen-32B": deepseek_r1_distill,
        "DeepSeek-R1": deepseek_r1,
        "DeepSeek-R1-Zero": deepseek_r1_zero,
    }
    model_function = dispatch.get(model_choice, lambda x, h: "Please select a model.")

    # Simulate model response with parameters (a real application would pass
    # these parameters through to actual model inference).
    raw = model_function(input_text, history)
    response = (
        f"**System Message:** {system_message}\n\n**Model Response:** {raw}\n\n"
        f"**Parameters Used:**\n- Max New Tokens: {max_new_tokens}\n- Temperature: {temperature}\n- Top-p: {top_p}"
    )

    history.append((input_text, response))
    return history, history, ""  # Update both chatbot output and state
# --- Gradio Interface ---
with gr.Blocks(theme=gr.themes.Soft()) as demo:  # Apply a theme
    gr.Markdown(
        """
# DeepSeek Chatbot
Created by [ruslanmv.com](https://ruslanmv.com/)

This is a demo of different DeepSeek models. Select a model, type your message, and click "Submit".
You can also adjust optional parameters like system message, max new tokens, temperature, and top-p.
"""
    )

    # Conversation state. Created before the ClearButton so it can be
    # registered there as well.
    chat_history = gr.State([])

    with gr.Row():
        with gr.Column(scale=4):  # Make chatbot take more space
            chatbot_output = gr.Chatbot(label="DeepSeek Chatbot", height=500)
            msg = gr.Textbox(label="Your Message", placeholder="Type your message here...")

            with gr.Row():
                submit_btn = gr.Button("Submit", variant="primary")
                # FIX: also clear the State. Previously only the visible
                # widgets were cleared, so the stale history reappeared on
                # the next submit.
                clear_btn = gr.ClearButton([msg, chatbot_output, chat_history])

        with gr.Column(scale=1):
            model_choice = gr.Radio(
                choices=["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"],
                label="Choose a Model",
                value="DeepSeek-R1",  # Default model
            )

            with gr.Accordion("Optional Parameters", open=False):
                system_message = gr.Textbox(
                    label="System Message",
                    value="You are a friendly Chatbot created by ruslanmv.com",
                    lines=2,
                )
                max_new_tokens = gr.Slider(
                    minimum=1, maximum=4000, value=200, label="Max New Tokens"
                )
                temperature = gr.Slider(
                    minimum=0.10, maximum=4.00, value=0.70, label="Temperature"
                )
                top_p = gr.Slider(
                    minimum=0.10, maximum=1.00, value=0.90, label="Top-p (nucleus sampling)"
                )

    # Event handling: button click and Enter-in-textbox share one handler.
    _inputs = [msg, chat_history, model_choice, system_message, max_new_tokens, temperature, top_p]
    _outputs = [chatbot_output, chat_history, msg]
    submit_btn.click(chatbot, _inputs, _outputs)
    msg.submit(chatbot, _inputs, _outputs)

# Launch the demo
if __name__ == "__main__":
    demo.launch()