Samagra07 committed
Commit 62bf7bb · verified · 1 Parent(s): 5a351f7

Update app.py

Files changed (1)
  1. app.py  +45 -44
app.py CHANGED
@@ -1,45 +1,46 @@
- import streamlit as st
- import google.generativeai as genai
- from dotenv import load_dotenv
- import os
- load_dotenv()
-
- genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
-
- st.title("Chat - Gemini Bot")
-
- model = genai.GenerativeModel(
-     model_name="gemini-pro"
- )
-
- if "messages" not in st.session_state:
-     st.session_state.messages = [
-         {
-             "role": "assistant",
-             "content": "Ask me anything"
-         }
-     ]
-
- for message in st.session_state.messages:
-     with st.chat_message(message['role']):
-         st.markdown(message['content'])
-
- def llm_function(query):
-     response = model.generate_content(query)
-     with st.chat_message("assistant"):
-         st.markdown(response.text)
-
-     st.session_state.messages.append(
-         {
-             "role": "assistant",
-             "content": response.text
-         }
-     )
-
- query = st.chat_input("Type your query here...")
-
- if query:
-     with st.chat_message("user"):
-         st.markdown(query)
-
+ import streamlit as st
+ import google.generativeai as genai
+ from dotenv import load_dotenv
+ import os
+
+ load_dotenv()
+
+ genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
+
+ st.title("Chat - Gemini Bot")
+
+ model = genai.GenerativeModel(
+     model_name="gemini-pro"
+ )
+
+ if "messages" not in st.session_state:
+     st.session_state.messages = [
+         {
+             "role": "assistant",
+             "content": "Ask me anything"
+         }
+     ]
+
+ for message in st.session_state.messages:
+     with st.chat_message(message['role']):
+         st.markdown(message['content'])
+
+ def llm_function(query):
+     response = model.generate_content(query)
+     with st.chat_message("assistant"):
+         st.markdown(response.text)
+
+     st.session_state.messages.append(
+         {
+             "role": "assistant",
+             "content": response.text
+         }
+     )
+
+ query = st.chat_input("Type your question here...")
+
+ if query:
+     with st.chat_message("user"):
+         st.markdown(query)
+
      llm_function(query=query)
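
Note: one behavior this commit leaves unchanged is that llm_function appends only the assistant reply to st.session_state.messages; the user's query is echoed with st.chat_message("user") but never stored, so Streamlit's rerun-and-replay loop at the top of app.py drops it from the visible history on the next interaction. A minimal sketch, not part of this commit, of how the user turn could also be persisted (all names are taken from the diff above):

if query:
    # Sketch only: store the user's turn so the history replay loop
    # in app.py redraws both sides of the conversation on rerun.
    st.session_state.messages.append({"role": "user", "content": query})
    with st.chat_message("user"):
        st.markdown(query)

    llm_function(query=query)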