Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -254,35 +254,60 @@ question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
|
|
254 |
|
255 |
rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)
|
256 |
|
257 |
-
chat_history = [
|
258 |
-
|
|
|
|
|
|
|
|
|
259 |
|
260 |
-
|
261 |
-
#
|
262 |
-
|
263 |
-
|
|
|
|
|
264 |
|
265 |
-
|
266 |
-
ai_message = rag_chain.invoke({"input": question, "chat_history": chat_history})
|
267 |
-
chat_history.extend([HumanMessage(content=question), ai_message["answer"]])
|
268 |
document_links = []
|
269 |
-
|
270 |
-
|
271 |
-
|
272 |
-
|
273 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
274 |
if document_links:
|
275 |
document_links_text = "\n".join(document_links)
|
276 |
-
|
277 |
-
|
278 |
-
|
|
|
|
|
279 |
|
280 |
-
return ai_message['answer'] + "\n" + links_text
|
281 |
|
282 |
-
demo = gr.ChatInterface(fn=ask, title="UNTE ChatBot",theme=gr.themes.Soft())
|
283 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
284 |
|
|
|
|
|
|
|
285 |
|
286 |
-
|
287 |
-
|
288 |
-
demo.launch(share = False)
|
|
|
254 |
|
255 |
# Compose the full RAG pipeline: the history-aware retriever feeds retrieved
# documents into the question-answering chain.
rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)

# Module-level default history; the UI below supersedes this with a per-session
# gr.State([]) — NOTE(review): this global appears unused after that, confirm.
chat_history = []
|
258 |
+
|
259 |
+
def ask(question, chat_history):
    """Answer a Moodle-related question through the RAG chain.

    Parameters
    ----------
    question : str
        Raw user question from the textbox.
    chat_history : list[tuple[str, str]]
        Accumulated ``(role, content)`` pairs ("user" / "assistant");
        mutated in place and also returned as updated state.

    Returns
    -------
    tuple
        ``(display_history, chat_history, "")`` — the formatted history for
        the gr.Chatbot widget, the raw history for the gr.State, and an
        empty string that clears the question textbox.
    """
    # Prepend a phrase to the question to steer retrieval toward Moodle.
    prepended_phrase = "using platform Moodle :"
    modified_question = prepended_phrase + question

    # Invoke the chain to get the response.
    ai_message = rag_chain.invoke({"input": modified_question, "chat_history": chat_history})
    chat_history.append(("user", question))

    answer = ai_message["answer"]

    # Collect source links from the retrieved documents, if any.
    document_links = [
        doc.metadata['url']
        for doc in ai_message.get('context', [])
        if 'url' in doc.metadata
    ]

    # Append the answer to the chat history (without sources).
    chat_history.append(("assistant", answer))

    # Build the gr.Chatbot display. Each row is (user_message, bot_message):
    # slot 0 renders on the right (user), slot 1 on the left (assistant).
    # Fix: the slots were previously swapped, so questions showed as bot
    # messages and answers as user messages.
    display_chat_history = []
    for role, content in chat_history:
        if role == "user":
            display_chat_history.append((content, None))  # user question on the right
        else:
            display_chat_history.append((None, content))  # assistant answer on the left

    # Add sources to the last assistant message for display purposes only
    # (the stored history stays source-free).
    if document_links:
        document_links_text = "\n".join(document_links)
        display_chat_history[-1] = (None, display_chat_history[-1][1] + f"\nSources: {document_links_text}")

    # Return display history for the UI, and the actual chat history for internal use.
    return display_chat_history, chat_history, ""
|
296 |
+
|
297 |
|
|
|
298 |
|
|
|
299 |
|
300 |
+
# Initialize the Gradio interface: a chatbot display, a clear button, a
# question textbox, and a per-session gr.State holding the (role, content)
# history that ask() reads and mutates. (Removed the commented-out duplicate
# "clear" button code that was left behind.)
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    chatbot = gr.Chatbot()
    clear_button = gr.Button("Clear")
    question = gr.Textbox(placeholder="Ask me anything about Moodle...")
    chat_history = gr.State([])

    # Submitting the textbox calls ask(); its 3-tuple return updates the
    # chatbot display, the history state, and clears the textbox.
    question.submit(ask, [question, chat_history], [chatbot, chat_history, question])
    # Clear resets the display, the state, and the textbox; queue=False so
    # the reset is immediate rather than waiting behind queued requests.
    clear_button.click(lambda: ([], [], ""), None, [chatbot, chat_history, question], queue=False)

# Enable request queuing, then serve locally only (share=False).
demo.queue()
demo.launch(share=False)