from pathlib import Path
from typing import Iterator, List

from omagent_core.advanced_components.workflow.dnc.schemas.dnc_structure import \
    TaskTree
from omagent_core.engine.worker.base import BaseWorker
from omagent_core.memories.ltms.ltm import LTM
from omagent_core.models.llms.base import BaseLLMBackend
from omagent_core.models.llms.prompt import PromptTemplate
from omagent_core.utils.logger import logging
from omagent_core.utils.registry import registry
from openai import Stream
from pydantic import Field

CURRENT_PATH = root_path = Path(__file__).parents[0]


@registry.register_worker()
class WebpageConclude(BaseLLMBackend, BaseWorker):
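    # System and user prompt templates are loaded from files shipped alongside this module.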
    prompts: List[PromptTemplate] = Field(
        default=[
            PromptTemplate.from_file(
                CURRENT_PATH.joinpath("sys_prompt.prompt"), role="system"
            ),
            PromptTemplate.from_file(
                CURRENT_PATH.joinpath("user_prompt.prompt"), role="user"
            ),
        ]
    )

    def _run(self, dnc_structure: dict, last_output: str, *args, **kwargs):
        """A conclude node that summarizes and completes the root task.

        This component acts as the final node that:
        - Takes the root task and its execution results
        - Generates a final conclusion/summary of the entire task execution
        - Formats and presents the final output in a clear way
        - Cleans up any temporary state/memory used during execution

        The conclude node is responsible for providing a coherent final response that
        addresses the original root task objective based on all the work done by
        previous nodes.

        Args:
            dnc_structure (dict): The task tree containing the root task and results
            last_output (str): The final output from previous task execution
            *args: Additional arguments
            **kwargs: Additional keyword arguments

        Returns:
            dict: Final response with the conclusion/summary under the "last_output" key
        """
        task = TaskTree(**dnc_structure)
        self.callback.info(
            agent_id=self.workflow_instance_id,
            progress=f"Conclude",
            message=f"{task.get_current_node().task}",
        )
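        # Fill the prompt templates with the root task, the accumulated result, and
        # placeholders for any images cached in short-term memory, then query the LLM.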
        chat_complete_res = self.simple_infer(
            task=task.get_root().task,
            result=str(last_output),
            img_placeholders="".join(
                self.stm(self.workflow_instance_id).get("image_cache", {}).keys()
            ),
        )
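        # Streaming responses arrive as an iterator of chunks; accumulate the delta content.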
        if isinstance(chat_complete_res, Iterator):
            last_output = "Answer: "
            for chunk in chat_complete_res:
                if chunk.choices:
                    last_output += chunk.choices[0].delta.content or ""
            self.callback.send_answer(agent_id=self.workflow_instance_id, msg=last_output)
        else:
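            # Non-streaming responses return the full completion payload at once.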
            last_output = chat_complete_res["choices"][0]["message"]["content"]
            self.callback.send_answer(
                agent_id=self.workflow_instance_id,
                msg=f"Answer: {last_output}",
            )
        # self.callback.send_answer(agent_id=self.workflow_instance_id, msg=f"Token usage: {self.token_usage}")
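        # Clear this workflow instance's short-term memory (e.g. the image cache) now that the task is complete.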
        self.stm(self.workflow_instance_id).clear()
        return {"last_output": last_output}