euracle committed (verified)
Commit 4f918b0 · 1 parent: b366d00

Update app.py

Files changed (1):
  app.py  (+79, -106)
app.py CHANGED
@@ -10,12 +10,12 @@ from langchain.chains import RetrievalQA
 from langchain.document_loaders import PyPDFLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.chains import LLMChain
-from langchain.memory import ConversationBufferMemory

 # Set persistent storage path
 PERSISTENT_DIR = "vector_db"

 def initialize_vector_db():
+    # Check if vector database already exists in persistent storage
     if os.path.exists(PERSISTENT_DIR) and os.listdir(PERSISTENT_DIR):
         embeddings = HuggingFaceEmbeddings()
         vector_db = Chroma(persist_directory=PERSISTENT_DIR, embedding_function=embeddings)
@@ -46,15 +46,6 @@ def initialize_vector_db():
     vector_db.persist()
     return documents, vector_db

-# Initialize session state for chat history and memory
-if "chat_history" not in st.session_state:
-    st.session_state.chat_history = []
-if "memory" not in st.session_state:
-    st.session_state.memory = ConversationBufferMemory(
-        memory_key="chat_history",
-        return_messages=True
-    )
-
 # System instructions for the LLM
 system_prompt = """You are an expert organic farming consultant with specialization in Agro-Homeopathy. When providing suggestions and remedies:
 1. Always specify medicine potency as 6c unless the uploaded text mentions some other value explicitly
@@ -64,25 +55,36 @@ system_prompt = """You are an expert organic farming consultat

 api_key1 = os.getenv("api_key")

-# Page configuration
 start_time = time.time()
 st.set_page_config(page_title="Dr. Radha: The Agro-Homeopath", page_icon="🚀", layout="wide")

-# CSS styling
+# CSS for dark green background and white text
 st.markdown("""
 <style>
+/* Set background color for entire app */
 .stApp {
     background-color: #1B4D3E !important;
     color: white !important;
 }
+
+/* Style input fields */
 .stTextInput>div>div>input {
     color: black !important;
     background-color: rgba(255,255,255,0.1) !important;
 }
+
+/* Style buttons */
 .stButton>button {
     color: black !important;
     background-color: yellow !important;
 }
+
+}
+</style>
+""", unsafe_allow_html=True)
+
+st.markdown("""
+<style>
 #the-title {
     text-align: center;
     font-size: 24px;
@@ -91,25 +93,10 @@ st.markdown("""
 </style>
 """, unsafe_allow_html=True)

-# Sidebar
-st.sidebar.title("Chat History")
-
-# Clear Session button
-if st.sidebar.button("Clear Session"):
-    st.session_state.chat_history = []
-    st.session_state.memory.clear()
-    st.experimental_rerun()
-
-# Display chat history in sidebar
-for i, chat in enumerate(st.session_state.chat_history):
-    st.sidebar.text(f"Q{i+1}: {chat['question']}")
-    if st.sidebar.button(f"Go to Q{i+1}", key=f"btn_{i}"):
-        st.session_state.query = chat['question']
-
-# Main content
 st.title("🌿 Dr. Radha: AI-Powered Organic Farming Consultant")
 st.subheader("Specializing in Agro-Homeopathy | Free Consultation")

+# Add information request message
 st.markdown("""
 Please provide complete details about the issue, including:
 - Detailed description of plant problem
@@ -119,7 +106,7 @@ Please provide complete details about the issue, including:
 human_image = "human.png"
 robot_image = "bot.jpg"

-# Set up Groq API
+# Set up Groq API with temperature 0.7
 llm = ChatGroq(
     api_key=api_key1,
     max_tokens=None,
@@ -130,16 +117,18 @@ llm = ChatGroq(
 )

 embeddings = HuggingFaceEmbeddings()
+end_time = time.time()
+print(f"Setting up Groq LLM & Embeddings took {end_time - start_time:.4f} seconds")

-# Initialize session state for documents and vector_db
+# Initialize session state
 if "documents" not in st.session_state:
     st.session_state["documents"] = None
 if "vector_db" not in st.session_state:
     st.session_state["vector_db"] = None
 if "query" not in st.session_state:
     st.session_state["query"] = ""
-
-# Initialize vector database
+
+start_time = time.time()
 if st.session_state["documents"] is None or st.session_state["vector_db"] is None:
     with st.spinner("Loading data..."):
         documents, vector_db = initialize_vector_db()
@@ -149,13 +138,16 @@ else:
     documents = st.session_state["documents"]
     vector_db = st.session_state["vector_db"]

+end_time = time.time()
+print(f"Loading and processing PDFs & vector database took {end_time - start_time:.4f} seconds")
+
+start_time = time.time()
 retriever = vector_db.as_retriever()

-# Prompt templates
 prompt_template = """As an expert organic farming consultant with specialization in Agro-Homeopathy, analyze the following context and question to provide a clear, structured response.

 Context: {context}
-Chat History: {chat_history}
+
 Question: {question}

 Provide your response in the following format:
@@ -189,15 +181,27 @@ Remember to maintain a professional, clear tone and ensure all medicine recommen

 Answer:"""

+# Create the QA chain with correct variables
+qa = RetrievalQA.from_chain_type(
+    llm=llm,
+    chain_type="stuff",
+    retriever=retriever,
+    chain_type_kwargs={
+        "prompt": PromptTemplate(
+            template=prompt_template,
+            input_variables=["context", "question"]
+        )
+    }
+)

+# Create a separate LLMChain for fallback
 fallback_template = """As an expert organic farming consultant with specialization in Agro-Homeopathy, analyze the following context and question to provide a clear, structured response.

-Chat History: {chat_history}
 Question: {question}

-Provide your response in the following format:
+Format your response as follows:
 Analysis: Analyze the described plant condition
-Treatment: Recommend relevant organic farming principles and specific homeopathic medicine(s) with exact potency and repetition frequency. Suggest a maximum of 4 medicines in the order of relevance for any single problem.
+Treatment: Recommend relevant organic farming principles and specific homeopathic medicine(s) with exact potency and repetition frequency. Suggest a maximum of 4 medicines in the order of relevance for any single problem.
 Instructions for Use:
 Small Plots or Gardens: Make sure your dispensing equipment is not contaminated with
 other chemicals or fertilisers as these may antidote the energetic effects of the treatment—
@@ -214,95 +218,64 @@ water or steam clean before use if necessary. Avoid using other chemicals or fer
 10 days following treatment.
 Dosage rates are approximate and may vary according to different circumstances and
 experiences. Suggested doses are:
-10 pills or 10ml/10 litre on small areas,
-500 pills or 125ml/500l per hectare,
-1000 pills or 250ml/500l per hectare,
-2500 pills or 500ml/500l per hectare,
+10 pills or 10ml/10 litre on small areas
+500 pills or 125ml/500l per hectare
+1000 pills or 250ml/500l per hectare
+2500 pills or 500ml/500l per hectare
 Add pills or liquid to your water and mix (with a stick if necessary for large containers).

 Recommendations: Provide couple of key pertinent recommendations based on the query

-Remember to maintain a professional, clear tone and ensure all medicine recommendations include specific potency.
+Maintain a professional tone and ensure all medicine recommendations include specific potency.

 Answer:"""

-# Create QA chain with memory
-qa = RetrievalQA.from_chain_type(
-    llm=llm,
-    chain_type="stuff",
-    retriever=retriever,
-    memory=st.session_state.memory,
-    chain_type_kwargs={
-        "prompt": PromptTemplate(
-            template=prompt_template,
-            input_variables=["context", "question", "chat_history"]
-        )
-    }
-)
+fallback_prompt = PromptTemplate(template=fallback_template, input_variables=["question"])
+fallback_chain = LLMChain(llm=llm, prompt=fallback_prompt)

-# Create fallback chain with memory
-fallback_prompt = PromptTemplate(
-    template=fallback_template,
-    input_variables=["question", "chat_history"]
-)
-fallback_chain = LLMChain(
-    llm=llm,
-    prompt=fallback_prompt,
-    memory=st.session_state.memory
-)
-
-# Chat container
 chat_container = st.container()

-# Query form
+st.markdown("""
+<style>
+.stButton>button {
+    color: black !important;
+    background-color: yellow !important;
+}
+</style>
+""", unsafe_allow_html=True)
+
 with st.form(key='query_form'):
-    query = st.text_input("Ask your question:", value=st.session_state.get("query", ""))
+    query = st.text_input("Ask your question:", value="")
     submit_button = st.form_submit_button(label='Submit')

-# Handle form submission
+end_time = time.time()
+#print(f"Setting up retrieval chain took {end_time - start_time:.4f} seconds")
+start_time = time.time()
+
 if submit_button and query:
     with st.spinner("Generating response..."):
-        # Add query to chat history
-        st.session_state.chat_history.append({
-            "question": query,
-            "timestamp": time.time()
-        })
-
-        # Get response with memory context
-        result = qa({
-            "question": query,
-            "chat_history": st.session_state.memory.chat_memory.messages if st.session_state.memory else []
-        })
-
+        result = qa({"query": query})
         if result['result'].strip() == "":
+            # If no result from PDF, use fallback chain
             fallback_result = fallback_chain.run(query)
             response = fallback_result
         else:
             response = result['result']
-
-        # Store the interaction in memory
-        st.session_state.memory.save_context(
-            {"input": query},
-            {"output": response}
-        )
-
-    # Display chat messages
-    for chat in st.session_state.chat_history:
-        col1, col2 = st.columns([1, 10])
-        with col1:
-            st.image(human_image, width=80)
-        with col2:
-            st.markdown(f"{chat['question']}")
-
-        col1, col2 = st.columns([1, 10])
-        with col1:
-            st.image(robot_image, width=80)
-        with col2:
-            if chat['question'] == query:
-                st.markdown(f"{response}")
-            else:
-                prev_response = st.session_state.memory.load_memory_variables({})
-                st.markdown(f"{prev_response.get('chat_history', '')}")

+    col1, col2 = st.columns([1, 10])
+    with col1:
+        st.image(human_image, width=80)
+    with col2:
+        st.markdown(f"{query}")
+    col1, col2 = st.columns([1, 10])
+    with col1:
+        st.image(robot_image, width=80)
+    with col2:
+        st.markdown(f"{response}")
+
     st.markdown("---")
+
     st.session_state["query"] = ""
+
+end_time = time.time()
+print(f"Actual query took {end_time - start_time:.4f} seconds")