ruslanmv committed on
Commit
155e743
·
verified ·
1 Parent(s): 189960f

Update app-work-only-1.py

Browse files
Files changed (1) hide show
  1. app-work-only-1.py +112 -0
app-work-only-1.py CHANGED
@@ -1,3 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
  import requests
3
 
@@ -100,3 +207,8 @@ if prompt := st.chat_input("Type your message..."):
100
 
101
  except Exception as e:
102
  st.error(f"Application Error: {str(e)}")
 
 
 
 
 
 
1
+
2
+ import streamlit as st
3
+ import requests
4
+
5
# Function to query the Hugging Face API
def query(payload, api_url):
    """POST ``payload`` to the Hugging Face Inference API and return the parsed JSON.

    The bearer token is read from Streamlit secrets (``HF_TOKEN``).
    Network/HTTP exceptions (including ``requests.exceptions.Timeout``)
    propagate to the caller, which wraps this call in a try/except.
    """
    headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
    # Fix: the original call had no timeout, so a stalled connection would
    # block the Streamlit script run indefinitely.
    response = requests.post(api_url, headers=headers, json=payload, timeout=60)
    return response.json()
10
+
11
# Page configuration for the Streamlit app.
_PAGE_OPTIONS = dict(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",
    layout="centered",
)
st.set_page_config(**_PAGE_OPTIONS)
17
+
18
# Seed per-session state on first run; later reruns keep existing values.
_STATE_DEFAULTS = {
    "messages": [],
    "selected_model": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
}
for _key, _default in _STATE_DEFAULTS.items():
    if _key not in st.session_state:
        st.session_state[_key] = _default
23
+
24
# Sidebar: model selection plus generation parameters.
with st.sidebar:
    st.header("Model Configuration")
    st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")

    # Dropdown to select model; the current choice is persisted in
    # session state so it survives reruns.
    model_options = [
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
        "deepseek-ai/DeepSeek-R1",
        "deepseek-ai/DeepSeek-R1-Zero",
    ]
    selected_model = st.selectbox(
        "Select Model",
        model_options,
        index=model_options.index(st.session_state.selected_model),
    )
    st.session_state.selected_model = selected_model

    system_message = st.text_area(
        "System Message",
        value="You are a friendly Chatbot created by ruslanmv.com",
        height=100,
    )

    # Generation parameter sliders: (label, min, max, default).
    max_tokens = st.slider("Max Tokens", 1, 4000, 512)
    temperature = st.slider("Temperature", 0.1, 4.0, 0.7)
    top_p = st.slider("Top-p", 0.1, 1.0, 0.9)
58
+
59
# Main chat surface: page heading plus a hint pointing at the sidebar.
_TITLE = "🤖 DeepSeek Chatbot"
_CAPTION = "Powered by Hugging Face Inference API - Configure in sidebar"
st.title(_TITLE)
st.caption(_CAPTION)
62
+
63
# Replay the stored conversation so the transcript survives reruns.
for _msg in st.session_state.messages:
    with st.chat_message(_msg["role"]):
        st.markdown(_msg["content"])
67
+
68
# Handle input: record the user turn, call the API, render the reply.
if prompt := st.chat_input("Type your message..."):
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.spinner("Generating response..."):
            # Prepare the payload for the API.
            # NOTE(review): the sidebar's system_message is collected but never
            # sent here — confirm whether it should be prepended to the prompt.
            payload = {
                "inputs": prompt,
                "parameters": {
                    "max_new_tokens": max_tokens,
                    "temperature": temperature,
                    "top_p": top_p,
                    "return_full_text": False,
                },
            }

            # Query the Hugging Face API using the selected model.
            api_url = f"https://api-inference.huggingface.co/models/{st.session_state.selected_model}"
            output = query(payload, api_url)

            # Handle API response.
            if isinstance(output, list) and len(output) > 0 and 'generated_text' in output[0]:
                assistant_response = output[0]['generated_text']

                with st.chat_message("assistant"):
                    st.markdown(assistant_response)

                st.session_state.messages.append(
                    {"role": "assistant", "content": assistant_response}
                )
            elif isinstance(output, dict) and "error" in output:
                # Fix: the Inference API reports failures (model loading, bad
                # token, rate limit) as {"error": ...}; surface that message
                # instead of the generic fallback below.
                st.error(f"API Error: {output['error']}")
            else:
                st.error("Error: Unable to generate a response. Please try again.")

    except Exception as e:
        st.error(f"Application Error: {str(e)}")
105
+
106
# NOTE: A stale duplicate of this entire script was previously embedded here
# inside a bare '''...''' string literal. A module-level string expression is
# evaluated and discarded, so it had no runtime effect — it was dead code and
# has been removed; recover the old version from git history if needed.