implement basic loading state
app.py CHANGED
@@ -83,6 +83,8 @@ async def on_message(message: cl.Message):
     final_answer = cl.Message(content="")
     await final_answer.send()
 
+    loading_msg = None  # Initialize reference to loading message
+
     # Stream the response
     async for chunk in graph.astream(
         {"messages": [HumanMessage(content=message.content)]},
@@ -91,9 +93,19 @@
         for node, values in chunk.items():
             if values.get("messages"):
                 last_message = values["messages"][-1]
-
+                # Check for tool calls in additional_kwargs
+                if hasattr(last_message, "additional_kwargs") and last_message.additional_kwargs.get("tool_calls"):
+                    tool_name = last_message.additional_kwargs["tool_calls"][0]["function"]["name"]
+                    loading_msg = cl.Message(
+                        content=f"🔍 Using {tool_name}...",
+                        author="Tool"
+                    )
+                    await loading_msg.send()
                 # Only stream AI messages, skip tool outputs
-
+                elif isinstance(last_message, AIMessage):
+                    if loading_msg:
+                        await loading_msg.remove()
+                        loading_msg = None
                     await final_answer.stream_token(last_message.content)
 
     await final_answer.send()
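For context, here is a minimal, self-contained sketch of what the full on_message handler looks like with this change applied. It is an approximation rather than the actual app.py: the search tool, the ChatOpenAI(model="gpt-4o-mini") model, and the create_react_agent construction are placeholder assumptions standing in for however this Space really builds its graph; only the streaming loop mirrors the diff above.

import chainlit as cl
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent


@tool
def search(query: str) -> str:
    """Placeholder tool so the sketch is self-contained; the real app defines its own tools."""
    return f"Results for: {query}"


# Assumption: the real app.py builds its graph elsewhere; a prebuilt ReAct agent stands in here.
graph = create_react_agent(ChatOpenAI(model="gpt-4o-mini"), [search])


@cl.on_message
async def on_message(message: cl.Message):
    final_answer = cl.Message(content="")
    await final_answer.send()

    loading_msg = None  # Reference to the transient "Using <tool>..." indicator

    # stream_mode="updates" yields per-node state updates, matching the
    # chunk.items() loop in the diff above.
    async for chunk in graph.astream(
        {"messages": [HumanMessage(content=message.content)]},
        stream_mode="updates",
    ):
        for node, values in chunk.items():
            if not values.get("messages"):
                continue
            last_message = values["messages"][-1]

            if hasattr(last_message, "additional_kwargs") and last_message.additional_kwargs.get("tool_calls"):
                # The model requested a tool call: show a loading indicator
                tool_name = last_message.additional_kwargs["tool_calls"][0]["function"]["name"]
                loading_msg = cl.Message(content=f"🔍 Using {tool_name}...", author="Tool")
                await loading_msg.send()
            elif isinstance(last_message, AIMessage):
                # Final AI answer: clear the indicator and stream the text
                if loading_msg:
                    await loading_msg.remove()
                    loading_msg = None
                await final_answer.stream_token(last_message.content)

    await final_answer.send()

One design note: reading the tool name from additional_kwargs["tool_calls"] relies on the OpenAI-style payload; recent langchain-core versions also expose a provider-agnostic last_message.tool_calls list, which would make the check less model-specific.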