reorganize code
ChatAtomicFlow.py  +22 -7  CHANGED
@@ -357,15 +357,18 @@ class ChatAtomicFlow(AtomicFlow):
 
         self._state_update_add_chat_message(role=self.flow_config["user_name"],
                                             content=user_message_content)
-
-    def run(self,input_message: FlowMessage):
-        """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow.
 
-        :param input_message: The input data of the flow.
-        :type input_message: aiflows.messages.FlowMessage
+    def query_llm(self,input_data: Dict[str, Any]):
+        """ This method queries the LLM. It processes the input, calls the backend and returns the response.
+
+        :param input_data: The input data of the flow.
+        :type input_data: Dict[str, Any]
+        :return: The response of the LLM to the input.
+        :rtype: Union[str, List[str]]
         """
-        input_data = input_message.data
-
+
+
+        # ~~~ Process input ~~~
         self._process_input(input_data)
 
         # ~~~ Call ~~~
@@ -378,6 +381,18 @@ class ChatAtomicFlow(AtomicFlow):
                 content=answer
             )
         response = response if len(response) > 1 or len(response) == 0 else response[0]
+        return response
+
+    def run(self,input_message: FlowMessage):
+        """ This method runs the flow. It processes the input, calls the backend and updates the state of the flow.
+
+        :param input_message: The input data of the flow.
+        :type input_message: aiflows.messages.FlowMessage
+        """
+        input_data = input_message.data
+
+        response = self.query_llm(input_data=input_data)
+
 
         reply_message = self._package_output_message(input_message, response = {"api_output": response})
 
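For reference, below is a minimal standalone sketch of the pattern this commit introduces: run() now only unpacks the FlowMessage and packages the reply, while the input processing and backend call live in query_llm(). The FlowMessage stand-in, the ChatAtomicFlowSketch class, and the stubbed backend call are illustrative assumptions, not the real aiflows implementations.

# Minimal standalone sketch of the run()/query_llm() split shown above.
# FlowMessage, the backend call, and the reply packaging are stubbed for
# illustration; they are not the real aiflows implementations.
from typing import Any, Dict, List, Union


class FlowMessage:
    """Hypothetical stand-in for aiflows.messages.FlowMessage."""

    def __init__(self, data: Dict[str, Any]):
        self.data = data


class ChatAtomicFlowSketch:
    def query_llm(self, input_data: Dict[str, Any]) -> Union[str, List[str]]:
        # Processes the input and calls the (stubbed) LLM backend; returns a
        # single answer unless the backend produced several.
        answers = ["echo: " + str(input_data.get("question", ""))]  # stubbed backend call
        return answers if len(answers) > 1 or len(answers) == 0 else answers[0]

    def run(self, input_message: FlowMessage) -> Dict[str, Any]:
        # run() is now a thin wrapper: unpack the message, delegate the LLM
        # call to query_llm(), and package the reply (packaging simplified here).
        input_data = input_message.data
        response = self.query_llm(input_data=input_data)
        return {"api_output": response}


if __name__ == "__main__":
    flow = ChatAtomicFlowSketch()
    print(flow.run(FlowMessage({"question": "What is an atomic flow?"})))

One practical consequence of the split is that subclasses and tests can exercise query_llm() directly on a plain dict, without constructing a FlowMessage or going through the message packaging in run().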