gsgoncalves committed
Commit f4a5714 · verified · 1 Parent(s): 980d8c8

Delete agents.py

Files changed (1)
  1. agents.py +0 -81
agents.py DELETED
@@ -1,81 +0,0 @@
- from ast import main
- import os
- from typing import TypedDict, List, Dict, Any, Optional
- from langgraph.graph import StateGraph, START, END
- from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
- from langchain_google_genai import ChatGoogleGenerativeAI
- from langchain_core.rate_limiters import InMemoryRateLimiter
-
-
- GAIA_PROMPT = "You are a general AI assistant. I will ask you a question. Report your thoughts, and finish your answer with the following template: FINAL ANSWER: [YOUR FINAL ANSWER]. YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise. If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string."
-
- # Initialize our LLM
-
-
- # Data
- class GAIAAgentState(TypedDict):
-     """State of the GAIA agent."""
-
-     task_id: str
-     question: str
-     file_id: Optional[str]
-     answer: Optional[str]
-     thought: Optional[str]
-     # TODO add file binary fields
-
-
- class BasicAgent:
-     def __init__(self):
-         # Set up the rate limiter
-         self.rate_limiter = InMemoryRateLimiter(
-             requests_per_second=0.2  # 12 requests per minute
-         )
-         self.model = ChatGoogleGenerativeAI(
-             model="gemini-2.0-flash",
-             temperature=0,
-             max_tokens=None,
-             timeout=None,
-             max_retries=2,
-             google_api_key=os.environ["GEMINI_API_KEY"],
-             rate_limiter=self.rate_limiter,
-         )
-         print("BasicAgent initialized.")
-
-     def __call__(self, question: str) -> str:
-         print(f"Agent received question (first 50 chars): {question[:50]}...")
-         messages = [
-             ("system", GAIA_PROMPT),
-             ("human", question),
-         ]
-
-         # Pass the messages to the model
-         ai_msg = self.model.invoke(messages)
-
-         # Extract and return the AI's response
-         print(f"Agent returning response: {ai_msg.content}")
-         return (
-             str(ai_msg.content)
-             if not isinstance(ai_msg.content, str)
-             else ai_msg.content
-         )
-
-
- class GraphManager:
-     def __init__(self):
-         self.graph = StateGraph(GAIAAgentState)
-         print("GraphManager initialized.")
-
-     def read_question_and_define_gaia_state(
-         self, state: GAIAAgentState
-     ) -> GAIAAgentState:
-         pass  # TODO: Implement the logic to read the question and define the GAIA state
-
-     def build_graph(self) -> StateGraph:
-         # Add nodes
-         self.graph.add_node(
-             "read_question_and_define_gaia_state",
-             self.read_question_and_define_gaia_state,
-         )
-         # Add edges
-         self.graph.add_edge(START, "read_question_and_define_gaia_state")
-         return self.graph