Configure and enable final report generation from journal.
Files changed:
- aide/journal2report.py (+6 -5)
- aide/run.py (+10 -2)
- aide/utils/config.py (+2 -1)
- aide/utils/config.yaml (+6 -0)
aide/journal2report.py

@@ -1,8 +1,8 @@
-from backend import query
-from journal import Journal
+from .backend import query
+from .journal import Journal
+from .utils.config import StageConfig
 
-
-def journal2report(journal: Journal, task_desc: dict):
+def journal2report(journal: Journal, task_desc: dict, rcfg: StageConfig):
     """
     Generate a report from a journal, the report will be in markdown format.
     """
@@ -24,6 +24,7 @@ def journal2report(journal: Journal, task_desc: dict):
     return query(
         system_message=system_prompt_dict,
         user_message=context_prompt,
-        model=
+        model=rcfg.model,
+        temperature=rcfg.temp,
         max_tokens=4096,
     )
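For orientation, a minimal usage sketch of the new signature. This is not taken from the repo: the task description values are made up, SimpleNamespace merely stands in for the StageConfig instance that cfg.report provides, and the no-argument Journal() constructor is assumed only to show the call shape.

# Hypothetical sketch: call journal2report with per-stage LLM settings.
# `journal` would normally be the Journal built up during an AIDE run;
# an empty Journal is used here purely for illustration.
from types import SimpleNamespace

from aide.journal import Journal
from aide.journal2report import journal2report

journal = Journal()  # placeholder; a real run populates this with Nodes
task_desc = {"goal": "predict house prices", "metric": "RMSE"}  # illustrative values

# Stand-in for cfg.report: a StageConfig exposing `model` and `temp`.
rcfg = SimpleNamespace(model="gpt-4-turbo", temp=1.0)

report_md = journal2report(journal, task_desc, rcfg)
print(report_md)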
aide/run.py

@@ -8,6 +8,7 @@ from .utils import tree_export
 from .agent import Agent
 from .interpreter import Interpreter
 from .journal import Journal, Node
+from .journal2report import journal2report
 from omegaconf import OmegaConf
 from rich.columns import Columns
 from rich.console import Group
@@ -130,8 +131,15 @@ def run():
     live.update(generate_live())
     interpreter.cleanup_session()
 
-
+    if cfg.generate_report:
+        print("Generating final report from journal...")
+        report = journal2report(journal, task_desc, cfg.report)
+        print(report)
+        report_file_path = cfg.log_dir / 'report.md'
+        with open(report_file_path, "w") as f:
+            f.write(report)
+        print('Report written to file:', report_file_path)
 
 
 if __name__ == "__main__":
-    run()
+    run()
aide/utils/config.py

@@ -58,7 +58,6 @@ class ExecConfig:
     agent_file_name: str
     format_tb_ipython: bool
 
-
 @dataclass
 class Config(Hashable):
     data_dir: Path
@@ -76,6 +75,8 @@ class Config(Hashable):
     exp_name: str
 
     exec: ExecConfig
+    generate_report: bool
+    report: StageConfig
     agent: AgentConfig
 
 
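The new report field reuses the existing per-stage StageConfig dataclass. Judging only from how it is consumed in this change (rcfg.model, rcfg.temp) and populated in config.yaml, its relevant shape is roughly the sketch below; the actual class in aide/utils/config.py may carry additional fields.

# Rough shape of StageConfig as used by this change (not the full definition).
from dataclasses import dataclass

@dataclass
class StageConfig:
    model: str   # LLM used for this stage, e.g. "gpt-4-turbo"
    temp: float  # sampling temperature forwarded to the backend query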
aide/utils/config.yaml

@@ -24,6 +24,12 @@ exec:
   agent_file_name: runfile.py
   format_tb_ipython: False
 
+generate_report: True
+# LLM settings for final report from journal
+report:
+  model: gpt-4-turbo
+  temp: 1.0
+
 # agent hyperparams
 agent:
   # how many improvement iterations to run
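A quick way to sanity-check that the new keys resolve is to load the file with OmegaConf directly; this is a standalone sketch, not the project's own config loader.

# Standalone check: the new top-level keys are readable from the YAML.
from omegaconf import OmegaConf

cfg = OmegaConf.load("aide/utils/config.yaml")
print(cfg.generate_report)  # True
print(cfg.report.model)     # gpt-4-turbo
print(cfg.report.temp)      # 1.0

Assuming the project keeps its usual OmegaConf-style CLI overrides, the report stage should also be tunable or disabled per run with dotlist arguments such as generate_report=False or report.temp=0.5.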