bishwathakuri committed on
Commit
a79c393
·
verified ·
1 Parent(s): 38916c0

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +3 -38
src/streamlit_app.py CHANGED
@@ -1,42 +1,7 @@
1
- import os
2
-
3
- os.environ["MPLCONFIGDIR"] = "/tmp" # Prevent matplotlib config errors
4
  os.environ["STREAMLIT_BROWSER_GATHER_USAGE_STATS"] = "false"
5
  os.environ["STREAMLIT_SERVER_HEADLESS"] = "true"
6
  os.environ["STREAMLIT_SERVER_ENABLE_FILE_WATCHER"] = "false"
7
-
8
  os.environ["HF_HOME"] = "/tmp/huggingface"
9
-
10
- import streamlit as st
11
- import torch
12
- from transformers import AutoModelForCausalLM, AutoTokenizer
13
-
14
- # Title and UI
15
- st.set_page_config(page_title="DeepSeek-R1 Chatbot", page_icon="🤖")
16
- st.title("🧠 DeepSeek-R1 CPU Chatbot")
17
- st.caption("Running entirely on CPU using Hugging Face Transformers")
18
-
19
-
20
- # Load the model and tokenizer
21
- @st.cache_resource
22
- def load_model():
23
- tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-Coder-1.3B-base")
24
- model = AutoModelForCausalLM.from_pretrained("deepseek-ai/DeepSeek-Coder-1.3B-base")
25
- return tokenizer, model
26
-
27
-
28
- tokenizer, model = load_model()
29
-
30
- # Prompt input
31
- user_input = st.text_area(
32
- "📥 Enter your prompt here:", "Explain what a neural network is."
33
- )
34
-
35
- if st.button("🧠 Generate Response"):
36
- with st.spinner("Thinking..."):
37
- inputs = tokenizer(user_input, return_tensors="pt")
38
- outputs = model.generate(**inputs, max_new_tokens=100)
39
- response = tokenizer.decode(outputs[0], skip_special_tokens=True)
40
-
41
- st.markdown("### 🤖 Response:")
42
- st.write(response)
 
1
+ os.environ["MPLCONFIGDIR"] = "/tmp"
 
 
2
  os.environ["STREAMLIT_BROWSER_GATHER_USAGE_STATS"] = "false"
3
  os.environ["STREAMLIT_SERVER_HEADLESS"] = "true"
4
  os.environ["STREAMLIT_SERVER_ENABLE_FILE_WATCHER"] = "false"
5
+ os.environ["STREAMLIT_CONFIG_DIR"] = "/tmp/.streamlit"
6
  os.environ["HF_HOME"] = "/tmp/huggingface"
7
+ os.environ["TRANSFORMERS_CACHE"] = "/tmp/huggingface"