import streamlit as st
from models import demo # Import the demo object from models.py
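# NOTE (assumption): models.py is not shown here. Based on how `demo` is used below,
# it is expected to be a callable that accepts a payload dict (messages, system,
# max_tokens, temperature, top_p) and returns an OpenAI-style response dict
# containing a "choices" list.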

# --- Streamlit App Configuration ---
st.set_page_config(
    page_title="DeepSeek Chatbot",
    page_icon="🤖",
    layout="wide"
)

# --- App Title and Description ---
st.title("DeepSeek Chatbot")
st.markdown("""
Created by [ruslanmv.com](https://ruslanmv.com/)

This is a demo of different DeepSeek models. Select a model in the sidebar, type your message, and press Enter.
You can also adjust optional parameters such as the system message, max new tokens, temperature, and top-p.
""")

# --- Sidebar for Model Selection and Parameters ---
with st.sidebar:
    st.header("Options")
    model_choice = st.radio(
        "Choose a Model",
        options=["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"],
        index=1  # Default to "DeepSeek-R1"
    )

    with st.expander("Optional Parameters", expanded=False):
        system_message = st.text_area(
            "System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            height=100
        )
        max_new_tokens = st.slider(
            "Max New Tokens",
            min_value=1,
            max_value=4000,
            value=200
        )
        temperature = st.slider(
            "Temperature",
            min_value=0.10,
            max_value=4.00,
            value=0.70
        )
        top_p = st.slider(
            "Top-p (nucleus sampling)",
            min_value=0.10,
            max_value=1.00,
            value=0.90
        )

# --- Chatbot Function ---
def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
    # Build the request payload for the model.
    # Note: model_choice is accepted here but not added to the payload;
    # model selection is assumed to be handled by the demo object in models.py.
    payload = {
        "messages": [{"role": "user", "content": input_text}],
        "system": system_message,
        "max_tokens": max_new_tokens,
        "temperature": temperature,
        "top_p": top_p
    }

    # Run inference using the selected model
    try:
        response = demo(payload)  # Use the demo object directly
        if isinstance(response, dict) and "choices" in response:
            assistant_response = response["choices"][0]["message"]["content"]
        else:
            assistant_response = "Unexpected model response format."
    except Exception as e:
        assistant_response = f"Error: {str(e)}"

    # Append the user/assistant exchange to the history
    history.append((input_text, assistant_response))
    return history

# --- Chat History Management ---
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# --- Chat Interface ---
st.header("Chat with DeepSeek")

# Display the existing chat history
for user_msg, assistant_msg in st.session_state.chat_history:
    with st.chat_message("user"):
        st.write(user_msg)
    with st.chat_message("assistant"):
        st.write(assistant_msg)

# Input box for a new message
input_text = st.chat_input("Type your message here...")

# Handle new message submission
if input_text:
    # Update the chat history with the new exchange
    st.session_state.chat_history = chatbot(
        input_text,
        st.session_state.chat_history,
        model_choice,
        system_message,
        max_new_tokens,
        temperature,
        top_p
    )
    # Rerun the app to display the updated chat history
    st.rerun()