Upload 3 files
Browse files- agentic/langgraph_agent.py +3 -1
- agentic/tools.py +13 -8
- app.py +54 -7
agentic/langgraph_agent.py
CHANGED
@@ -37,6 +37,7 @@ class LangGraphAgent4GAIA:
|
|
37 |
subtract,
|
38 |
divide,
|
39 |
modulo,
|
|
|
40 |
web_search,
|
41 |
arxiv_search,
|
42 |
wiki_search
|
@@ -67,7 +68,7 @@ class LangGraphAgent4GAIA:
|
|
67 |
|
68 |
|
69 |
# 2. Bind tools to LLM
|
70 |
-
self.llm_with_tools = llm.bind_tools(tools)
|
71 |
|
72 |
builder = StateGraph(MessagesState)
|
73 |
builder.add_node("assistant", self.assistant)
|
@@ -82,6 +83,7 @@ class LangGraphAgent4GAIA:
|
|
82 |
# Compile graph
|
83 |
return builder.compile()
|
84 |
|
|
|
85 |
if __name__ == "__main__":
|
86 |
from langchain_core.runnables.graph import MermaidDrawMethod
|
87 |
|
|
|
37 |
subtract,
|
38 |
divide,
|
39 |
modulo,
|
40 |
+
add_list,
|
41 |
web_search,
|
42 |
arxiv_search,
|
43 |
wiki_search
|
|
|
68 |
|
69 |
|
70 |
# 2. Bind tools to LLM
|
71 |
+
self.llm_with_tools = llm.bind_tools(tools, parallel_tool_calls=False)
|
72 |
|
73 |
builder = StateGraph(MessagesState)
|
74 |
builder.add_node("assistant", self.assistant)
|
|
|
83 |
# Compile graph
|
84 |
return builder.compile()
|
85 |
|
86 |
+
|
87 |
if __name__ == "__main__":
|
88 |
from langchain_core.runnables.graph import MermaidDrawMethod
|
89 |
|
agentic/tools.py
CHANGED
@@ -2,6 +2,7 @@ from langchain_core.tools import tool
|
|
2 |
from langchain_community.tools import DuckDuckGoSearchResults
|
3 |
from langchain_community.tools.tavily_search import TavilySearchResults
|
4 |
from langchain_community.document_loaders import ArxivLoader, WikipediaLoader
|
|
|
5 |
|
6 |
@tool
|
7 |
def add(a: int, b: int) -> int:
|
@@ -59,6 +60,13 @@ def modulo(a: int, b: int) -> int:
|
|
59 |
"""
|
60 |
return a % b
|
61 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
62 |
|
63 |
@tool
|
64 |
def web_search(query: str) -> str:
|
@@ -66,15 +74,14 @@ def web_search(query: str) -> str:
|
|
66 |
|
67 |
Args:
|
68 |
query: The search query."""
|
69 |
-
# search_docs = DuckDuckGoSearchResults(max_results=3).invoke(query
|
70 |
-
search_docs = TavilySearchResults(max_results=3).invoke(query
|
71 |
formatted_search_docs = "\n\n---\n\n".join(
|
72 |
[
|
73 |
f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
|
74 |
for doc in search_docs
|
75 |
])
|
76 |
-
|
77 |
-
return formatted_search_docs
|
78 |
|
79 |
|
80 |
@tool
|
@@ -89,8 +96,7 @@ def arxiv_search(query: str) -> str:
|
|
89 |
f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
|
90 |
for doc in search_docs
|
91 |
])
|
92 |
-
|
93 |
-
return formatted_search_docs
|
94 |
|
95 |
|
96 |
@tool
|
@@ -105,5 +111,4 @@ def wiki_search(query: str) -> str:
|
|
105 |
f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
|
106 |
for doc in search_docs
|
107 |
])
|
108 |
-
|
109 |
-
return formatted_search_docs
|
|
|
2 |
from langchain_community.tools import DuckDuckGoSearchResults
|
3 |
from langchain_community.tools.tavily_search import TavilySearchResults
|
4 |
from langchain_community.document_loaders import ArxivLoader, WikipediaLoader
|
5 |
+
from typing import List
|
6 |
|
7 |
@tool
|
8 |
def add(a: int, b: int) -> int:
|
|
|
60 |
"""
|
61 |
return a % b
|
62 |
|
63 |
+
def add_list(lst: List[float]) -> float:
    """Return the total of all numbers in a list.

    Args:
        lst: A list of numbers.
    """
    total = 0
    for value in lst:
        total += value
    return total
|
70 |
|
71 |
@tool
def web_search(query: str) -> str:
    """Search the web for a query.

    Args:
        query: The search query."""
    # search_docs = DuckDuckGoSearchResults(max_results=3).invoke({'query': query})
    search_docs = TavilySearchResults(max_results=3).invoke({'query': query})
    # TavilySearchResults returns plain dicts ({"url": ..., "content": ...}),
    # not Document objects, so accessing doc.metadata/doc.page_content would
    # raise AttributeError. Handle both shapes for robustness.
    formatted_docs = []
    for doc in search_docs:
        if isinstance(doc, dict):
            source = doc.get("url", "")
            page = ""
            content = doc.get("content", "")
        else:
            source = doc.metadata["source"]
            page = doc.metadata.get("page", "")
            content = doc.page_content
        formatted_docs.append(
            f'<Document source="{source}" page="{page}"/>\n{content}\n</Document>'
        )
    formatted_search_docs = "\n\n---\n\n".join(formatted_docs)
    return f"web_results:\n{formatted_search_docs}"
|
|
|
85 |
|
86 |
|
87 |
@tool
|
|
|
96 |
f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
|
97 |
for doc in search_docs
|
98 |
])
|
99 |
+
return f"arxiv_content:\n{formatted_search_docs}"
|
|
|
100 |
|
101 |
|
102 |
@tool
|
|
|
111 |
f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
|
112 |
for doc in search_docs
|
113 |
])
|
114 |
+
return f"wikipedia_content:\n{formatted_search_docs}"
|
|
app.py
CHANGED
@@ -16,13 +16,14 @@ hence prompt it well ! The GAIA team shared a prompting example for your agent h
|
|
16 |
submission, just make your agent reply with the answer and nothing else).
|
17 |
"""
|
18 |
import os
|
|
|
19 |
import gradio as gr
|
20 |
import requests
|
21 |
import pandas as pd
|
22 |
from omegaconf import OmegaConf
|
23 |
from langchain_core.messages import HumanMessage
|
24 |
from agentic import LangGraphAgent4GAIA
|
25 |
-
|
26 |
|
27 |
def load_config(config_path: str):
|
28 |
config = OmegaConf.load(config_path)
|
@@ -50,14 +51,57 @@ class Agent:
|
|
50 |
)
|
51 |
print("LangGraphAgent4GAIA initialized.")
|
52 |
|
53 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
54 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
55 |
-
messages =
|
56 |
-
result = self.agent_manager.graph.invoke({"messages": messages}, {"recursion_limit":
|
57 |
answer = result['messages'][-1].content
|
58 |
final_answer = answer.split('FINAL ANSWER: ')[-1].strip()
|
59 |
-
return final_answer
|
60 |
-
|
61 |
|
62 |
def run_and_submit_all( profile: gr.OAuthProfile | None):
|
63 |
"""
|
@@ -116,11 +160,14 @@ def run_and_submit_all( profile: gr.OAuthProfile | None):
|
|
116 |
for item in questions_data:
|
117 |
task_id = item.get("task_id")
|
118 |
question_text = item.get("question")
|
|
|
119 |
if not task_id or question_text is None:
|
120 |
print(f"Skipping item with missing task_id or question: {item}")
|
121 |
continue
|
122 |
try:
|
123 |
-
submitted_answer = agent(question_text)
|
|
|
|
|
124 |
answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
|
125 |
results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
|
126 |
except Exception as e:
|
|
|
16 |
submission, just make your agent reply with the answer and nothing else).
|
17 |
"""
|
18 |
import os
|
19 |
+
import base64
|
20 |
import gradio as gr
|
21 |
import requests
|
22 |
import pandas as pd
|
23 |
from omegaconf import OmegaConf
|
24 |
from langchain_core.messages import HumanMessage
|
25 |
from agentic import LangGraphAgent4GAIA
|
26 |
+
from typing import List, Any
|
27 |
|
28 |
def load_config(config_path: str):
|
29 |
config = OmegaConf.load(config_path)
|
|
|
51 |
)
|
52 |
print("LangGraphAgent4GAIA initialized.")
|
53 |
|
54 |
+
@staticmethod
def _load_image(file_location: str) -> str:
    """Download a task's attached file and return it base64-encoded.

    Args:
        file_location: The attachment's file name; the stem (text before
            the first ".") is the task_id used by the scoring API.

    Returns:
        The file contents encoded as a base64 ASCII string.

    Raises:
        requests.HTTPError: If the scoring API responds with an error status.
        requests.Timeout: If the download does not complete in time.
    """
    # The scoring service serves attachments by task_id, not by file name.
    task_id = file_location.split(".")[0]
    url = f"https://agents-course-unit4-scoring.hf.space/files/{task_id}"

    # Timeout so one hung download cannot stall the whole submission run;
    # raise_for_status so a 404/500 error body is not silently base64-encoded
    # and fed to the model as if it were the image.
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    return base64.b64encode(response.content).decode("utf-8")
|
63 |
+
|
64 |
+
@staticmethod
def _concat_question_image(question: str, image_base64: Any) -> List[Any]:
    """Pair the question text with a base64-encoded PNG in one multimodal
    HumanMessage, using OpenAI-style content parts."""
    text_part = {
        "type": "text",
        "text": question
    }
    image_part = {
        "type": "image_url",
        "image_url": {
            "url": f"data:image/png;base64,{image_base64}"
        },
    }
    return [HumanMessage(content=[text_part, image_part])]
|
83 |
+
|
84 |
+
def _format_message(self, question, file_name):
    """Turn a GAIA question (plus optional attachment) into chat messages.

    Args:
        question: The task's question text.
        file_name: The attachment's file name; empty/None when the task
            has no attachment.

    Returns:
        A list with a single HumanMessage — multimodal when the attachment
        is a PNG image, plain text otherwise.
    """
    # The caller uses item.get("file_name"), which yields None (not "")
    # when the key is absent — a bare `!= ""` check would then crash on
    # None.split(...). Treat any falsy value as "no attachment".
    if file_name:
        suffix = file_name.split(".")[-1]
        if suffix == "png":
            image_base64 = self._load_image(file_name)
            return self._concat_question_image(question, image_base64)
        # Non-image attachments are only referenced by path in the prompt
        # so a file-reading tool (or the model) can decide what to do.
        question = f"{question} (FilePath: {file_name})"
    return [HumanMessage(content=question)]
|
96 |
+
|
97 |
+
def __call__(self, question: str, file_name: str) -> tuple:
    """Run the agent graph on one question.

    Args:
        question: The task's question text.
        file_name: Name of the task's attached file ("" when absent).

    Returns:
        A (result, final_answer) pair: the raw graph output (its
        "messages" list is printed by the caller) and the text after the
        last "FINAL ANSWER: " marker, stripped.
    """
    # NOTE: annotation fixed from `-> str` — the body returns a 2-tuple,
    # which the submission loop unpacks as `results, submitted_answer`.
    print(f"Agent received question (first 50 chars): {question[:50]}...")
    messages = self._format_message(question, file_name)
    # Cap graph steps so a looping agent cannot recurse forever.
    result = self.agent_manager.graph.invoke({"messages": messages}, {"recursion_limit": 30})
    answer = result['messages'][-1].content
    # The prompt instructs the model to end with "FINAL ANSWER: ...";
    # split on the last occurrence and keep only the answer text.
    final_answer = answer.split('FINAL ANSWER: ')[-1].strip()
    return result, final_answer
|
104 |
+
|
105 |
|
106 |
def run_and_submit_all( profile: gr.OAuthProfile | None):
|
107 |
"""
|
|
|
160 |
for item in questions_data:
|
161 |
task_id = item.get("task_id")
|
162 |
question_text = item.get("question")
|
163 |
+
file_name = item.get("file_name")
|
164 |
if not task_id or question_text is None:
|
165 |
print(f"Skipping item with missing task_id or question: {item}")
|
166 |
continue
|
167 |
try:
|
168 |
+
results, submitted_answer = agent(question_text, file_name)
|
169 |
+
for msg in results["messages"]:
|
170 |
+
msg.pretty_print()
|
171 |
answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
|
172 |
results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
|
173 |
except Exception as e:
|