Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -251,7 +251,7 @@ Answer:"""
|
|
251 |
# Create the QA chain with correct variables
|
252 |
memory = ConversationBufferMemory(
|
253 |
memory_key="chat_history",
|
254 |
-
input_key="question",
|
255 |
output_key="answer"
|
256 |
)
|
257 |
|
@@ -302,7 +302,7 @@ Answer:"""
|
|
302 |
|
303 |
fallback_prompt = PromptTemplate(
|
304 |
template=fallback_template,
|
305 |
-
input_variables=["question", "chat_history"]
|
306 |
)
|
307 |
fallback_chain = LLMChain(
|
308 |
llm=llm,
|
@@ -344,17 +344,13 @@ if submit_button and query:
|
|
344 |
st.write("🔍 Retrieving relevant information...")
|
345 |
st.write("🔍 Generating personalized response...")
|
346 |
chat_history = st.session_state.conversation_memory.load_memory_variables({}).get("chat_history", "")
|
347 |
-
|
348 |
-
|
349 |
-
|
350 |
-
|
351 |
-
|
352 |
-
|
353 |
-
response = fallback_chain.run(
|
354 |
-
question=query,
|
355 |
-
chat_history=st.session_state.conversation_memory.load_memory_variables({})["chat_history"])
|
356 |
-
else:
|
357 |
-
response = result['result']
|
358 |
|
359 |
st.session_state.conversation_memory.save_context(
|
360 |
{"input": query},
|
@@ -363,10 +359,8 @@ if submit_button and query:
|
|
363 |
|
364 |
# Display final response
|
365 |
st.markdown(response)
|
366 |
-
|
367 |
-
|
368 |
-
st.session_state.messages.append({"role": "assistant", "content": response})
|
369 |
-
|
370 |
# Clear the form
|
371 |
st.session_state["query"] = ""
|
372 |
|
|
|
251 |
# Create the QA chain with correct variables
|
252 |
memory = ConversationBufferMemory(
|
253 |
memory_key="chat_history",
|
254 |
+
input_key="query",
|
255 |
output_key="answer"
|
256 |
)
|
257 |
|
|
|
302 |
|
303 |
fallback_prompt = PromptTemplate(
|
304 |
template=fallback_template,
|
305 |
+
input_variables=["query", "chat_history"]
|
306 |
)
|
307 |
fallback_chain = LLMChain(
|
308 |
llm=llm,
|
|
|
344 |
st.write("🔍 Retrieving relevant information...")
|
345 |
st.write("🔍 Generating personalized response...")
|
346 |
chat_history = st.session_state.conversation_memory.load_memory_variables({}).get("chat_history", "")
|
347 |
+
try:
|
348 |
+
result = qa({
|
349 |
+
"query": query  # RetrievalQA expects the input key "query" (was "question" before this change)
|
350 |
+
})
|
351 |
+
response = result['result'] if result['result'].strip() != "" else fallback_chain.run(query=query, chat_history=st.session_state.conversation_memory.load_memory_variables({})["chat_history"])
|
352 |
+
except Exception as e:
|
353 |
+
response = fallback_chain.run(query=query, chat_history=st.session_state.conversation_memory.load_memory_variables({})["chat_history"])
|
|
|
|
|
|
|
|
|
354 |
|
355 |
st.session_state.conversation_memory.save_context(
|
356 |
{"input": query},
|
|
|
359 |
|
360 |
# Display final response
|
361 |
st.markdown(response)
|
362 |
+
st.session_state.messages.append({"role": "assistant", "content": response})
|
363 |
+
|
|
|
|
|
364 |
# Clear the form
|
365 |
st.session_state["query"] = ""
|
366 |
|