Update app.py
app.py CHANGED
@@ -1,31 +1,29 @@
 import streamlit as st
 import requests
-import json
 
-
-
-# The Inference API endpoint for your model
+# Hugging Face API URL
 API_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
 
-#
-
-
-headers =
-
-def query_hf_api(prompt: str):
-    """
-    Sends a JSON payload to the HF Inference API.
-    """
-    payload = {"inputs": prompt}
-    response = requests.post(API_URL, headers=headers, data=json.dumps(payload))
+# Function to query the Hugging Face API
+def query(payload):
+    headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
+    response = requests.post(API_URL, headers=headers, json=payload)
     return response.json()
 
-#
-
+# Streamlit app
+st.title("DeepSeek-R1-Distill-Qwen-32B Chatbot")
+
+# Input text box
+user_input = st.text_input("Enter your message:")
 
-
-
-
-
-
-
+if user_input:
+    # Query the Hugging Face API with the user input
+    payload = {"inputs": user_input}
+    output = query(payload)
+
+    # Display the output
+    if isinstance(output, list) and len(output) > 0 and 'generated_text' in output[0]:
+        st.write("Response:")
+        st.write(output[0]['generated_text'])
+    else:
+        st.write("Error: Unable to generate a response. Please try again.")
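
Note: the updated app reads its API token via st.secrets['HF_TOKEN'], so the Space needs an HF_TOKEN secret configured in its settings; when running Streamlit locally, the same key can go in .streamlit/secrets.toml. Below is a minimal sketch of calling the same endpoint outside Streamlit, assuming the token is exported as an HF_TOKEN environment variable (the variable name and the example prompt are illustrative, not part of this commit):

import os
import requests

# Same endpoint as in app.py
API_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"

# Assumption: the token is exported as HF_TOKEN for this standalone test
headers = {"Authorization": f"Bearer {os.environ['HF_TOKEN']}"}

# Mirror the app's query() helper: POST a JSON payload and decode the JSON response
response = requests.post(API_URL, headers=headers, json={"inputs": "Hello, who are you?"})
output = response.json()

# The app expects a list like [{"generated_text": "..."}]; anything else is treated as an error
if isinstance(output, list) and output and "generated_text" in output[0]:
    print(output[0]["generated_text"])
else:
    print("Unexpected response:", output)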