drod75 committed on
Commit
64eb99f
·
1 Parent(s): 63993cb

message_edits

Browse files
Files changed (2) hide show
  1. .gitignore +2 -0
  2. app.py +34 -22
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ __pycache__
2
+ .vscode
app.py CHANGED
@@ -1,38 +1,50 @@
1
- import streamlit as st
2
  from transformers import pipeline
3
 
4
- ## Model stuff
5
- model_name = "deepseek-ai/DeepSeek-R1"
6
  messages = [
7
- {"role": "System", "content": "You are a AI model that hits on people when they talk to you."},
8
  ]
9
 
10
- @st.cache_data
11
- def load_model():
12
- pipeline("text-generation", model="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B")
13
- return pipeline
14
 
15
- def response(content: str):
 
 
 
16
  try:
17
- pipe = load_model()
18
- messages.append({"role": "user", "content": str})
19
- content = pipe(messages, temperature=0.7, top_p=0.95)
20
- response = content[0]['generated_text']
21
- messages.append({"role": "assistant", "answer_to_previous_question": response_text})
22
- return response
23
  except Exception as e:
24
  st.error(f"An error occurred: {e}")
25
  return "I'm having a little trouble right now. Please try again later."
26
 
27
 
28
- ## Streamlit Start :)
29
- st.title('Deepseek AI Partner')
 
 
 
 
 
 
 
30
 
31
  if prompt := st.chat_input("What is up?"):
32
- # Display user message in chat message container
33
  with st.chat_message("user"):
34
  st.markdown(prompt)
35
- if prompt:
36
- with st.chat_message("assistant"):
37
- respone = response(prompt)
38
- st.markdown(response)
 
 
 
 
 
 
 
import streamlit as st
from transformers import pipeline

# Model configuration: the distilled 1.5B DeepSeek-R1 variant.
model_name = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"

# Seed conversation passed to the model on every turn.
# NOTE: chat templates expect the lowercase role "system"; a capitalized
# "System" is not recognized as the system prompt by the tokenizer's
# chat template, so the instruction would be silently ignored.
messages = [
    {"role": "system", "content": "You are an AI model that responds in a flirtatious and playful manner."},
]
@st.cache_resource
def load_model(model_name):
    """Build a text-generation pipeline for *model_name* and cache it.

    ``st.cache_resource`` keeps the heavy pipeline object alive across
    Streamlit script reruns, so the model is downloaded/loaded only once.
    """
    text_pipe = pipeline(
        "text-generation",
        model=model_name,
        device_map="auto",
    )
    return text_pipe
 
# Instantiate the (cached) generation pipeline once at module import;
# subsequent Streamlit reruns reuse the same object via st.cache_resource.
pipe = load_model(model_name)
+
def generate_response(prompt: str) -> str:
    """Append *prompt* to the chat history, run the model, return its reply.

    On any failure the error is surfaced in the UI via ``st.error`` and a
    fallback string is returned so the chat keeps working.
    """
    messages.append({"role": "user", "content": prompt})
    try:
        output = pipe(messages, max_new_tokens=200, temperature=0.7, top_p=0.95)
        generated = output[0]['generated_text']
        # When the pipeline is fed a chat (list of role/content dicts),
        # 'generated_text' is the *whole conversation* as a list — taking it
        # verbatim would store a list where a string is expected. Extract the
        # final (assistant) message's content; plain string output passes
        # through unchanged.
        if isinstance(generated, list):
            response_text = generated[-1]["content"]
        else:
            response_text = generated
        messages.append({"role": "assistant", "content": response_text})
        return response_text
    except Exception as e:
        st.error(f"An error occurred: {e}")
        return "I'm having a little trouble right now. Please try again later."
# Streamlit Start :)
st.title('DeepSeek AI Partner')

# Seed the per-session chat history with a *copy* of the module-level
# conversation. Assigning ``messages`` itself would alias the same list
# that generate_response() appends to, double-recording every turn.
if "messages" not in st.session_state:
    st.session_state["messages"] = list(messages)

# Replay prior turns so the conversation survives Streamlit reruns.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("What is up?"):
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        response = generate_response(prompt)
        st.markdown(response)

    st.session_state.messages.append({"role": "user", "content": prompt})
    st.session_state.messages.append({"role": "assistant", "content": response})

# NOTE: Streamlit apps are launched with ``streamlit run app.py``; there is
# no ``st.run()`` API, so the former ``if __name__ == "__main__": st.run()``
# guard raised AttributeError and has been removed.