ruslanmv committed on
Commit
c7e00fb
·
verified ·
1 Parent(s): 07a96e5

Create old.py

Browse files
Files changed (1) hide show
  1. old.py +109 -0
old.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import streamlit as st
from models import demo  # Shared inference entry point defined in models.py

# --- Streamlit App Configuration ---
st.set_page_config(page_title="DeepSeek Chatbot", page_icon="🤖", layout="wide")

# --- App Title and Description ---
st.title("DeepSeek Chatbot")
st.markdown("""
Created by [ruslanmv.com](https://ruslanmv.com/)
This is a demo of different DeepSeek models. Select a model, type your message, and click "Submit".
You can also adjust optional parameters like system message, max new tokens, temperature, and top-p.
""")

# --- Sidebar for Model Selection and Parameters ---
_MODELS = ["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"]

with st.sidebar:
    st.header("Options")
    # Default selection is "DeepSeek-R1" (second entry).
    model_choice = st.radio(
        "Choose a Model",
        options=_MODELS,
        index=_MODELS.index("DeepSeek-R1"),
    )

    # Generation knobs, collapsed by default to keep the sidebar compact.
    with st.expander("Optional Parameters", expanded=False):
        system_message = st.text_area(
            "System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            height=100,
        )
        max_new_tokens = st.slider("Max New Tokens", min_value=1, max_value=4000, value=200)
        temperature = st.slider("Temperature", min_value=0.10, max_value=4.00, value=0.70)
        top_p = st.slider("Top-p (nucleus sampling)", min_value=0.10, max_value=1.00, value=0.90)
# --- Chatbot Function ---
def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
    """Run one inference turn and append the exchange to the transcript.

    Args:
        input_text: The new user message.
        history: List of ``(user_message, assistant_message)`` tuples from
            prior turns. Mutated in place.
        model_choice: Selected model name. NOTE(review): currently not
            forwarded to the backend -- ``demo`` is a single pre-imported
            object; TODO route the choice once models.py exposes it.
        system_message: System prompt forwarded to the model.
        max_new_tokens: Cap on generated tokens (sent as ``max_tokens``).
        temperature: Sampling temperature.
        top_p: Nucleus-sampling threshold.

    Returns:
        The same ``history`` list with the new (user, assistant) pair
        appended. Errors from inference are captured as the assistant's
        reply string rather than raised, so the UI never crashes.
    """
    # Replay the prior conversation so the model has multi-turn context;
    # previously only the latest user message was sent, so every turn
    # looked like the start of a new conversation to the model.
    messages = []
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": input_text})

    payload = {
        "messages": messages,
        "system": system_message,
        "max_tokens": max_new_tokens,
        "temperature": temperature,
        "top_p": top_p,
    }

    # Run inference using the selected model; any failure (network, bad
    # payload, unexpected schema) is folded into the assistant reply.
    try:
        response = demo(payload)  # Use the demo object directly
        if isinstance(response, dict) and "choices" in response:
            assistant_response = response["choices"][0]["message"]["content"]
        else:
            assistant_response = "Unexpected model response format."
    except Exception as e:
        assistant_response = f"Error: {str(e)}"

    # Append user and assistant messages to history
    history.append((input_text, assistant_response))
    return history
# --- Chat History Management ---
# Ensure the per-session transcript exists before first use.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# --- Chat Interface ---
st.header("Chat with DeepSeek")

# Replay the stored transcript: one user bubble and one assistant
# bubble per completed turn.
for question, answer in st.session_state.chat_history:
    st.chat_message("user").write(question)
    st.chat_message("assistant").write(answer)

# Prompt for the next message; chat_input returns a falsy value until
# the user actually submits something.
input_text = st.chat_input("Type your message here...")

if input_text:
    # Run inference, store the new (user, assistant) pair, then force a
    # rerun so the refreshed transcript is rendered immediately.
    st.session_state.chat_history = chatbot(
        input_text,
        st.session_state.chat_history,
        model_choice,
        system_message,
        max_new_tokens,
        temperature,
        top_p,
    )
    st.rerun()