Update app.py
app.py
CHANGED
@@ -239,7 +239,8 @@ qa = RetrievalQA.from_chain_type(
         "prompt": PromptTemplate(
             template=prompt_template,
             input_variables=["context", "question", "chat_history"]
-        )
+        ),
+        "memory": st.session_state.conversation_memory,
     }
 )
 
@@ -334,17 +335,17 @@ if submit_button and query:
     # Show user message
     with st.chat_message("user", avatar="👤"):
         st.markdown(query)
-
+
     # Show typing indicator while generating response "🌿"
     with st.chat_message("assistant", avatar=robot_image):
         with st.status("Analyzing your query...", expanded=True):
             st.write("🔍 Retrieving relevant information...")
             st.write("📝 Generating personalized response...")
-
+        chat_history = st.session_state.conversation_memory.load_memory_variables({}).get("chat_history", "")
         # Generate response
         result = qa({
             "query": query,
-            "chat_history":
+            "chat_history": chat_history if chat_history else ""
         })
 
         if result['result'].strip() == "":
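For context, a minimal sketch of how the objects this diff relies on might be wired together: a ConversationBufferMemory kept in st.session_state.conversation_memory, a prompt_template that exposes chat_history, and the RetrievalQA chain from the first hunk. This is a hypothetical reconstruction, not the actual app.py; llm, retriever, and the prompt text are placeholders.

# Hypothetical sketch only — one way the pieces referenced by this diff could be set up.
import streamlit as st
from langchain.chains import RetrievalQA
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate

# Keep a single memory object across Streamlit reruns.
if "conversation_memory" not in st.session_state:
    st.session_state.conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",   # must match the prompt variable below
        input_key="question",        # assumption: which chain input to record
    )

# Assumption: the real prompt_template already contains {chat_history}.
prompt_template = """Use the context and the previous conversation to answer.

Previous conversation:
{chat_history}

Context:
{context}

Question: {question}
Answer:"""

qa = RetrievalQA.from_chain_type(
    llm=llm,              # placeholder: defined elsewhere in app.py
    chain_type="stuff",
    retriever=retriever,  # placeholder: defined elsewhere in app.py
    chain_type_kwargs={
        "prompt": PromptTemplate(
            template=prompt_template,
            input_variables=["context", "question", "chat_history"],
        ),
        "memory": st.session_state.conversation_memory,
    },
)

With the memory attached through chain_type_kwargs, the chain is expected to record each question/answer pair after every call, so the load_memory_variables({}) lookup added in the second hunk can return the accumulated history on later turns; if a given LangChain version does not do this automatically, the turn can be saved back explicitly with st.session_state.conversation_memory.save_context({"question": query}, {"output": result["result"]}).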