euracle committed on
Commit
554ffe9
Β·
verified Β·
1 Parent(s): c1f1fb3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -17
app.py CHANGED
@@ -251,7 +251,7 @@ Answer:"""
251
  # Create the QA chain with correct variables
252
  memory = ConversationBufferMemory(
253
  memory_key="chat_history",
254
- input_key="question",
255
  output_key="answer"
256
  )
257
 
@@ -302,7 +302,7 @@ Answer:"""
302
 
303
  fallback_prompt = PromptTemplate(
304
  template=fallback_template,
305
- input_variables=["question", "chat_history"]
306
  )
307
  fallback_chain = LLMChain(
308
  llm=llm,
@@ -344,17 +344,13 @@ if submit_button and query:
344
  st.write("πŸ” Retrieving relevant information...")
345
  st.write("πŸ“ Generating personalized response...")
346
  chat_history = st.session_state.conversation_memory.load_memory_variables({}).get("chat_history", "")
347
- # Generate response
348
- result = qa({
349
- "query": query,
350
- })
351
-
352
- if result['result'].strip() == "":
353
- response = fallback_chain.run(
354
- question=query,
355
- chat_history=st.session_state.conversation_memory.load_memory_variables({})["chat_history"])
356
- else:
357
- response = result['result']
358
 
359
  st.session_state.conversation_memory.save_context(
360
  {"input": query},
@@ -363,10 +359,8 @@ if submit_button and query:
363
 
364
  # Display final response
365
  st.markdown(response)
366
-
367
- # Add assistant response to history
368
- st.session_state.messages.append({"role": "assistant", "content": response})
369
-
370
  # Clear the form
371
  st.session_state["query"] = ""
372
 
 
251
  # Create the QA chain with correct variables
252
  memory = ConversationBufferMemory(
253
  memory_key="chat_history",
254
+ input_key="query",
255
  output_key="answer"
256
  )
257
 
 
302
 
303
  fallback_prompt = PromptTemplate(
304
  template=fallback_template,
305
+ input_variables=["query", "chat_history"]
306
  )
307
  fallback_chain = LLMChain(
308
  llm=llm,
 
344
  st.write("πŸ” Retrieving relevant information...")
345
  st.write("πŸ“ Generating personalized response...")
346
  chat_history = st.session_state.conversation_memory.load_memory_variables({}).get("chat_history", "")
347
+ try:
348
+ result = qa({
349
+ "query": query # RetrievalQA expects the input key "query"
350
+ })
351
+ response = result['result'] if result['result'].strip() != "" else fallback_chain.run(query=query, chat_history=st.session_state.conversation_memory.load_memory_variables({})["chat_history"])
352
+ except Exception as e:
353
+ response = fallback_chain.run(query=query, chat_history=st.session_state.conversation_memory.load_memory_variables({})["chat_history"])
 
 
 
 
354
 
355
  st.session_state.conversation_memory.save_context(
356
  {"input": query},
 
359
 
360
  # Display final response
361
  st.markdown(response)
362
+ st.session_state.messages.append({"role": "assistant", "content": response})
363
+
 
 
364
  # Clear the form
365
  st.session_state["query"] = ""
366