ruslanmv committed · verified
Commit e10cc1c · 1 Parent(s): a65d723

Create old2.py

Files changed (1)
  1. old2.py +127 -0
old2.py ADDED
@@ -0,0 +1,127 @@
+ # app.py
+ import streamlit as st
+ from huggingface_hub import InferenceClient
+ from datetime import datetime
+
+ # Configure page
+ st.set_page_config(
+     page_title="DeepSeek Chatbot - ruslanmv.com",
+     page_icon="🤖",
+     layout="centered",
+     initial_sidebar_state="expanded"
+ )
+
+ # Initialize session state
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+ # Sidebar controls
+ with st.sidebar:
+     st.title("🤖 Chatbot Settings")
+     st.markdown("Created by [ruslanmv.com](https://ruslanmv.com/)")
+
+     # Model selection
+     selected_model = st.selectbox(
+         "Choose Model",
+         options=[
+             "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
+             "deepseek-ai/DeepSeek-R1",
+             "deepseek-ai/DeepSeek-R1-Zero"
+         ],
+         index=0
+     )
+
+     # System message
+     system_message = st.text_area(
+         "System Message",
+         value="You are a friendly Chatbot created by ruslanmv.com",
+         height=100
+     )
+
+     # Generation parameters
+     max_new_tokens = st.slider(
+         "Max new tokens",
+         min_value=1,
+         max_value=4000,
+         value=512,
+         step=50
+     )
+
+     temperature = st.slider(
+         "Temperature",
+         min_value=0.1,
+         max_value=4.0,
+         value=1.0,
+         step=0.1
+     )
+
+     top_p = st.slider(
+         "Top-p (nucleus sampling)",
+         min_value=0.1,
+         max_value=1.0,
+         value=0.9,
+         step=0.1
+     )
+
+     # Optional HF Token
+     hf_token = st.text_input(
+         "HuggingFace Token (optional)",
+         type="password",
+         help="Enter your HuggingFace token if required for model access"
+     )
+
+ # Main chat interface
+ st.title("💬 DeepSeek Chatbot")
+ st.caption("🚀 A conversational AI powered by DeepSeek models")
+
+ # Display chat messages
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+         if "timestamp" in message:
+             st.caption(f"_{message['timestamp']}_")
+
+ # Chat input and processing
+ if prompt := st.chat_input("Type your message..."):
+     # Add user message to history
+     st.session_state.messages.append({
+         "role": "user",
+         "content": prompt,
+         "timestamp": datetime.now().strftime("%H:%M:%S")
+     })
+
+     # Display user message
+     with st.chat_message("user"):
+         st.markdown(prompt)
+         st.caption(f"_{st.session_state.messages[-1]['timestamp']}_")
+
+     # Create full prompt with system message
+     full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
+
+     # Create client and generate response
+     client = InferenceClient(model=selected_model, token=hf_token)
+
+     # Display assistant response
+     with st.chat_message("assistant"):
+         response = st.write_stream(
+             client.text_generation(
+                 full_prompt,
+                 max_new_tokens=max_new_tokens,
+                 temperature=temperature,
+                 top_p=top_p,
+                 stream=True
+             )
+         )
+         timestamp = datetime.now().strftime("%H:%M:%S")
+         st.caption(f"_{timestamp}_")
+
+     # Add assistant response to history
+     st.session_state.messages.append({
+         "role": "assistant",
+         "content": response,
+         "timestamp": timestamp
+     })
+
+ # Optional debug information
+ # st.sidebar.markdown("---")
+ # st.sidebar.json(st.session_state.messages)
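
The streaming behavior in this file comes from handing the generator returned by InferenceClient.text_generation(..., stream=True) straight to st.write_stream. A quick way to verify model access and token settings before launching the UI is to exercise that same call outside Streamlit. The sketch below is illustrative only and not part of the commit; the model id, the HF_TOKEN environment variable, and the sample prompt are assumptions.

# streaming_check.py -- illustrative sketch, not part of the commit
# Assumes huggingface_hub is installed and HF_TOKEN is (optionally) set in the environment.
import os
from huggingface_hub import InferenceClient

client = InferenceClient(
    model="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",  # same default as the app's selectbox
    token=os.environ.get("HF_TOKEN"),                  # None is acceptable for public access
)

# Mirror the app's prompt construction: system message, then the user turn.
prompt = "You are a friendly Chatbot created by ruslanmv.com\n\nUser: Hello!\nAssistant:"

# With stream=True, text_generation yields text chunks as they arrive;
# the app feeds this same iterator to st.write_stream.
for chunk in client.text_generation(
    prompt,
    max_new_tokens=256,
    temperature=1.0,
    top_p=0.9,
    stream=True,
):
    print(chunk, end="", flush=True)
print()

The committed app itself would be launched the usual Streamlit way, e.g. pip install streamlit huggingface_hub and then streamlit run old2.py.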