roger33303 committed
Commit 56015a3 · verified · 1 Parent(s): c77a68c

Upload 4 files

Files changed (4):
  1. agents.py +106 -0
  2. app.py +171 -171
  3. custom_tools.py +371 -0
  4. requirements.txt +12 -0
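
In short, the upload adds the LangGraph supervisor and its seven specialist ReAct agents (agents.py), the Gradio chat front end that streams their output (app.py), the stock, web-search, Reddit, and NewsData.io tools they call (custom_tools.py), and the dependency list (requirements.txt).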
agents.py ADDED
@@ -0,0 +1,106 @@
+ from langgraph.graph import StateGraph, MessagesState, START, END
+ from langmem.short_term import SummarizationNode
+ from langchain_core.messages.utils import count_tokens_approximately
+ from langgraph.prebuilt.chat_agent_executor import AgentState
+ from langgraph.checkpoint.memory import InMemorySaver
+ from typing import Any
+ import os
+ from langchain.chat_models import init_chat_model
+ from langgraph.prebuilt import create_react_agent
+ from langgraph_supervisor import create_supervisor
+ import custom_tools
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+ # The supervisor prompt is supplied via the SUPERVISOR_PROMPT environment variable (.env).
+ supervisor_prompt = os.getenv("SUPERVISOR_PROMPT", "")
+
+
+ def build_graph():
+     a_debug = False  # set to True for verbose LangGraph execution traces
+     model = init_chat_model("llama3-70b-8192", model_provider="groq")
+
+     # Summarize long histories before each model call so the agents stay inside the context window.
+     summarization_node = SummarizationNode(token_counter=count_tokens_approximately,
+                                            model=model,
+                                            max_tokens=2048,
+                                            max_summary_tokens=1024,
+                                            output_messages_key="llm_input_messages")
+
+     class State(AgentState):
+         context: dict[str, Any]
+
+     checkpointer = InMemorySaver()
+
+     agent0 = create_react_agent(model=model,
+                                 tools=[custom_tools.stock_data_tool],
+                                 prompt="Pass the company name to the stock tool to produce the stock fundamentals. Summarize all the information in one paragraph of 200 words.",
+                                 name="Stock Analyst",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     agent1 = create_react_agent(model=model,
+                                 tools=[custom_tools.web_search],
+                                 prompt="You are a geek who can find anything on the internet. Do the research and provide the summary in 250 words and in only one paragraph.",
+                                 name="Web Surfer",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     agent2 = create_react_agent(model=model,
+                                 tools=[custom_tools.reddit_search_tool],
+                                 prompt="You are a social media analyst. Your job is to search social media and find out public sentiment on any topic. Do the research and provide the summary in 250 words and in only one paragraph.",
+                                 name="Social Media Analyst",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     agent3 = create_react_agent(model=model,
+                                 tools=[custom_tools.tech_news_tool],
+                                 prompt="You are a technology news analyst. You find the latest news related to a topic and summarize it to establish the overall picture. Do the research and provide the summary in 250 words and in only one paragraph.",
+                                 name="Tech Journalist",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     agent4 = create_react_agent(model=model,
+                                 tools=[custom_tools.politics_news_tool],
+                                 prompt="You are a politics news analyst. You find the latest news related to a topic and summarize it to establish the overall picture. Do the research and provide the summary in 250 words and in only one paragraph.",
+                                 name="Political Journalist",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     agent5 = create_react_agent(model=model,
+                                 tools=[custom_tools.business_news_tool],
+                                 prompt="You are a business news analyst. You find the latest news related to a topic and summarize it to establish the overall picture. Do the research and provide the summary in 250 words and in only one paragraph.",
+                                 name="Business Journalist",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     agent6 = create_react_agent(model=model,
+                                 tools=[custom_tools.world_news_tool],
+                                 prompt="You are a geopolitical news analyst. You find the latest news related to a topic and summarize it to establish the overall picture. Do the research and provide the summary in 250 words and in only one paragraph.",
+                                 name="Geopolitical Journalist",
+                                 pre_model_hook=summarization_node,
+                                 state_schema=State,
+                                 checkpointer=checkpointer,
+                                 debug=a_debug)
+
+     # Route work between the specialist agents using the supervisor prompt loaded from the environment above.
+     graph = create_supervisor(agents=[agent0, agent1, agent2, agent3, agent4, agent5, agent6],
+                               model=model,
+                               prompt=supervisor_prompt)
+     return graph.compile()
+
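
build_graph() returns a compiled supervisor graph that app.py drives through graph.stream(); a minimal standalone sketch of that same call pattern, assuming the Groq, Reddit, and NewsData keys plus SUPERVISOR_PROMPT are already set in the environment, would be:

from agents import build_graph

graph = build_graph()
state = {"messages": [{"role": "user", "content": "Should I invest in Tesla?"}]}

# Each streamed chunk is keyed by the node (supervisor or agent) that produced it.
for chunk in graph.stream(state):
    node = next(iter(chunk))
    for message in chunk[node]["messages"]:
        print(node, ":", getattr(message, "content", ""))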
app.py CHANGED
@@ -1,171 +1,171 @@
+ import gradio as gr
+ from gradio import ChatMessage
+ from agents import build_graph
+ import os
+ import random
+ from dotenv import dotenv_values
+
+ js_func = """
+ function refresh() {
+     const url = new URL(window.location);
+
+     if (url.searchParams.get('__theme') !== 'dark') {
+         url.searchParams.set('__theme', 'dark');
+         window.location.href = url.href;
+     }
+ }
+ """
+
+
+ def run_agent_stream(history, question, groq_key, reddit_id, reddit_secret, reddit_agent, news_key):
+     # Make the user-supplied keys available to the tools via environment variables.
+     os.environ["GROQ_API_KEY"] = groq_key or ""
+     os.environ["REDDIT_CLIENT_ID"] = reddit_id or ""
+     os.environ["REDDIT_CLIENT_SECRET"] = reddit_secret or ""
+     os.environ["REDDIT_USER_AGENT"] = reddit_agent or ""
+     os.environ["NEWS_API"] = news_key or ""
+
+     try:
+         graph = build_graph()
+         state = {"messages": [{"role": "user", "content": question}]}
+         history = history or []
+         history.append(ChatMessage(role="user", content=question))
+         history.append(ChatMessage(role="assistant", content="**Supervisor:**\n I am working with my agents to get a result. Sit tight, I’ll get back with a detailed report."))
+         yield history, "<div class='loader'></div>", gr.update(interactive=False)
+
+         for chunk in graph.stream(state):
+             agent_key = next(iter(chunk))
+             messages = chunk[agent_key]["messages"]
+             for msg in messages:
+                 if msg.__class__.__name__ == "HumanMessage":
+                     continue
+                 name = getattr(msg, "name", agent_key)
+                 if str(name).split('_')[0] == 'transfer':
+                     continue
+                 content = msg.content.strip()
+
+                 if not content or "Transferring back" in content or "Successfully transferred" in content:
+                     continue
+
+                 is_final = msg.response_metadata.get("finish_reason") == "stop"
+                 is_supervisor = msg.name == "supervisor"
+                 if is_final and is_supervisor:
+                     history.append(ChatMessage(role="assistant", content=content, metadata={"title": "✅ Final Report"}))
+                     yield history, "", gr.update(interactive=True)
+                 else:
+                     emojis = {
+                         "Stock Analyst": "🧠",
+                         "Web Surfer": "🌐",
+                         "Social Media Analyst": "🎦",
+                         "Business Journalist": "📊"
+                     }
+                     emoji = emojis.get(name, random.choice(["🏹", "🧭", "🚩"]))
+                     title = f"{emoji} {name}" if name else ""
+                     his = ChatMessage(role="assistant", content=content, metadata={"title": title})
+                     if his not in history:
+                         history.append(his)
+                     yield history, "<div class='loader'></div>", gr.update(interactive=False)
+     except Exception as e:
+         print("Error: ", e)
+         # Surface the failure in the chat and re-enable the input instead of leaving the spinner running.
+         history = history or []
+         history.append(ChatMessage(role="assistant", content=f"Something went wrong: {e}"))
+         yield history, "", gr.update(interactive=True)
+
+
+ def load_env_file(file):
+     try:
+         values = dotenv_values(file.name)
+         return (
+             values.get("GROQ_API_KEY", ""),
+             values.get("REDDIT_CLIENT_ID", ""),
+             values.get("REDDIT_CLIENT_SECRET", ""),
+             values.get("REDDIT_USER_AGENT", ""),
+             values.get("NEWS_API", "")
+         )
+     except Exception as e:
+         print("Failed to load .env file:", e)
+         return "", "", "", "", ""
+
+
+ custom_css = """
+ <style>
+ .loader {
+     border: 4px solid #f3f3f3;
+     border-top: 4px solid #3498db;
+     border-radius: 50%;
+     width: 20px;
+     height: 20px;
+     animation: spin 1s linear infinite;
+     margin-top: 10px;
+ }
+ @keyframes spin {
+     0% { transform: rotate(0deg); }
+     100% { transform: rotate(360deg); }
+ }
+ #toggle-btn {
+     position: fixed;
+     top: 10px;
+     left: 10px;
+     z-index: 1000;
+     width: 28px;
+     height: 28px;
+     padding: 0;
+     font-size: 18px;
+     font-weight: bold;
+     line-height: 1;
+     text-align: center;
+     background: #eee;
+     border: 1px solid #ccc;
+     border-radius: 4px;
+ }
+ </style>
+ """
+
+ def toggle_sidebar(is_visible):
+     return (
+         gr.update(visible=not is_visible),
+         not is_visible,
+         "+" if is_visible else "×"
+     )
+
+ with gr.Blocks(title="Stock Market AI", js=js_func) as demo:
+     gr.HTML(custom_css)
+     gr.Markdown("## 🧠 Stock Market AI Agent\nAnalyze a stock using multiple AI agents (fundamentals, news, sentiment, etc.).")
+
+     sidebar_state = gr.State(True)
+     toggle_btn = gr.Button("×", elem_id="toggle-btn")
+
+     with gr.Row():
+         with gr.Column(scale=1) as sidebar:
+             gr.Markdown("### 🔐 Enter Your API Keys or Upload a .env File")
+             file_upload = gr.File(label="Upload .env or txt file", file_types=[".txt", ".env"])
+             groq_key = gr.Textbox(label="GROQ_API_KEY", placeholder="Paste your GROQ API Key here", type="password")
+             reddit_id = gr.Textbox(label="REDDIT_CLIENT_ID (Visit: https://www.reddit.com/prefs/apps)", placeholder="Your Reddit App Client ID", type="password")
+             reddit_secret = gr.Textbox(label="REDDIT_CLIENT_SECRET", placeholder="Your Reddit App Secret", type="password")
+             reddit_agent = gr.Textbox(label="REDDIT_USER_AGENT", placeholder="Your Reddit User Agent")
+             news_key = gr.Textbox(label="NEWS_API", placeholder="Your Newsdata.io API Key", type="password")
+
+         with gr.Column(scale=4):
+             chatbot = gr.Chatbot(label="Agent Chat", type="messages", resizable=True, show_copy_button=True)
+             msg = gr.Textbox(label="Your Stock Question", placeholder="Should I invest in Tesla?")
+             spinner = gr.HTML("")
+             clear = gr.Button("Clear")
+
+     file_upload.change(
+         fn=load_env_file,
+         inputs=file_upload,
+         outputs=[groq_key, reddit_id, reddit_secret, reddit_agent, news_key]
+     )
+
+     toggle_btn.click(
+         toggle_sidebar,
+         inputs=[sidebar_state],
+         outputs=[sidebar, sidebar_state, toggle_btn]
+     )
+
+     msg.submit(
+         run_agent_stream,
+         inputs=[chatbot, msg, groq_key, reddit_id, reddit_secret, reddit_agent, news_key],
+         outputs=[chatbot, spinner, msg],
+         queue=True
+     )
+
+     clear.click(lambda: ([], "", gr.update(interactive=True)), outputs=[chatbot, spinner, msg])
+
+ demo.launch(height=100, show_error=True)
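
load_env_file() reads exactly five keys from the uploaded file, so a .env (or .txt) along the following lines, with the placeholder values swapped for real credentials, fills the sidebar fields. Note that SUPERVISOR_PROMPT, which agents.py reads via load_dotenv(), is not one of the fields this uploader populates.

GROQ_API_KEY=your_groq_api_key
REDDIT_CLIENT_ID=your_reddit_client_id
REDDIT_CLIENT_SECRET=your_reddit_client_secret
REDDIT_USER_AGENT=stock-market-ai by u/your_username
NEWS_API=your_newsdata_io_api_key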
custom_tools.py ADDED
@@ -0,0 +1,371 @@
+ import os
+ from langchain_community.tools import DuckDuckGoSearchResults, RedditSearchRun
+ from langchain_community.utilities.reddit_search import RedditSearchAPIWrapper
+ from langchain_community.tools.reddit_search.tool import RedditSearchSchema
+ from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
+ from langchain.tools import Tool, tool
+ from pydantic import BaseModel
+ from time import sleep
+ import re
+
+ groq_api = os.getenv('GROQ_API_KEY')
+ Onews_api = os.getenv('NEWS_API')
+
+
+ from newsdataapi import NewsDataApiClient
+ import yfinance as yf
+ import pandas as pd
+
+
+ # Input schemas for the tools below.
+ class RedditInput(BaseModel):
+     query: str
+     sort: str = "new"
+     time_filter: str = "week"
+     subreddit: str = "stocks"
+     limit: str = "5"
+
+ class WebSearchInput(BaseModel):
+     query: str
+
+ class StandardNewsSearchProtocol(BaseModel):
+     topic: str
+
+
+ class StockFundamentals(BaseModel):
+     company_name: str
+
+
+ @tool(args_schema=RedditInput)
+ def reddit_search_tool(query: str, sort: str = "new", time_filter: str = "week", subreddit: str = "stocks", limit: str = "5") -> str:
+     """
+     Search Reddit for a given query. Provide query and optionally sort, time_filter, subreddit, and limit.
+     """
+     sleep(1)
+     try:
+         search = RedditSearchRun(api_wrapper=RedditSearchAPIWrapper())
+         search_params = RedditSearchSchema(
+             query=query,
+             sort=sort,
+             time_filter=time_filter,
+             subreddit=subreddit,
+             limit=limit
+         )
+         result = search.run(tool_input=search_params.model_dump())
+     except Exception:
+         result = "There was an error in running the tool. Try again or skip the tool."
+
+     sleep(1)
+     return result
+
+
+ def resolve_ticker(company_name: str) -> str:
+     """
+     Resolves the correct stock ticker for a given company name using web search.
+     Example: 'Apple' -> 'AAPL', 'Tesla' -> 'TSLA'
+     """
+     try:
+         wrapper = DuckDuckGoSearchAPIWrapper(max_results=1)
+         search = DuckDuckGoSearchResults(api_wrapper=wrapper)
+         query = f"{company_name} stock ticker site:finance.yahoo.com"
+         results = search.invoke(query)
+         # Extract the ticker symbol from a Yahoo Finance quote URL in the search results.
+         match = re.search(r"finance\.yahoo\.com/quote/([^/?]+)", results)
+         if match:
+             return match.group(1).strip()
+         else:
+             return f"Not able to find the correct ticker for {company_name}. Trying again..."
+     except Exception:
+         return "Not able to run the tool successfully."
+
+
+ @tool(args_schema=StockFundamentals)
+ def fetch_stock_summary(company_name: str) -> str:
+     """
+     Fetches a comprehensive stock summary including technical indicators, daily stats for the last 4 days,
+     a 1-month summary, and quarterly trends.
+     Args: company_name: Full name of the company.
+     """
+     sleep(1)
+     try:
+         ticker = resolve_ticker(company_name=company_name)
+         stock = yf.Ticker(ticker)
+         info = stock.info
+         current_price = info.get("currentPrice", "N/A")
+         market_cap = info.get("marketCap", "N/A")
+         pe_ratio = info.get("trailingPE", "N/A")
+         sector = info.get("sector", "N/A")
+         industry = info.get("industry", "N/A")
+         summary = info.get("longBusinessSummary", "N/A")
+
+         # Daily stats for the last four trading sessions.
+         last_4_days = stock.history(period="5d")
+         last_4 = last_4_days.tail(4).copy()
+         daily_info = "\nLast 4 Days:\n"
+         for date, row in last_4.iterrows():
+             change = ((row['Close'] - row['Open']) / row['Open']) * 100
+             daily_info += f"- {date.date()}: Close ${row['Close']:.2f}, Vol: {int(row['Volume'])}, Change: {change:+.2f}%\n"
+
+         # One-month aggregates.
+         month_df = stock.history(period="1mo")
+         avg_close = month_df['Close'].mean()
+         high_close = month_df['Close'].max()
+         low_close = month_df['Close'].min()
+         total_volume = month_df['Volume'].sum()
+         month_summary = (
+             f"\n1-Month Summary:\n"
+             f"- Avg Close: ${avg_close:.2f}\n"
+             f"- High: ${high_close:.2f} | Low: ${low_close:.2f}\n"
+             f"- Total Volume: {int(total_volume)}"
+         )
+
+         # Quarterly (3-month) trend.
+         quarter_df = stock.history(period="3mo")
+         start_price = quarter_df['Close'].iloc[0]
+         end_price = quarter_df['Close'].iloc[-1]
+         pct_change = ((end_price - start_price) / start_price) * 100
+         high_q = quarter_df['Close'].max()
+         low_q = quarter_df['Close'].min()
+         avg_vol_q = quarter_df['Volume'].mean()
+         quarter_summary = (
+             f"\nQuarterly Summary (3mo):\n"
+             f"- Start Price: ${start_price:.2f} | End Price: ${end_price:.2f}\n"
+             f"- % Change: {pct_change:.2f}%\n"
+             f"- High: ${high_q:.2f} | Low: ${low_q:.2f}\n"
+             f"- Avg Volume: {int(avg_vol_q)}"
+         )
+
+         # Technical indicators computed on the 1-month window.
+         df = month_df.copy()
+         df['SMA_10'] = df['Close'].rolling(10).mean()
+         df['EMA_10'] = df['Close'].ewm(span=10).mean()
+         delta = df['Close'].diff()
+         gain = delta.where(delta > 0, 0.0)
+         loss = -delta.where(delta < 0, 0.0)
+         avg_gain = gain.rolling(window=14).mean()
+         avg_loss = loss.rolling(window=14).mean()
+         rs = avg_gain / avg_loss
+         df['RSI_14'] = 100 - (100 / (1 + rs))
+         ema_12 = df['Close'].ewm(span=12, adjust=False).mean()
+         ema_26 = df['Close'].ewm(span=26, adjust=False).mean()
+         df['MACD'] = ema_12 - ema_26
+         df['MACD_Signal'] = df['MACD'].ewm(span=9, adjust=False).mean()
+         df['BB_Middle'] = df['Close'].rolling(20).mean()
+         df['BB_Upper'] = df['BB_Middle'] + 2 * df['Close'].rolling(20).std()
+         df['BB_Lower'] = df['BB_Middle'] - 2 * df['Close'].rolling(20).std()
+         df['ATR_14'] = df[['High', 'Low', 'Close']].apply(lambda x: max(x['High'] - x['Low'], abs(x['High'] - x['Close']), abs(x['Low'] - x['Close'])), axis=1).rolling(14).mean()
+         df['Volatility'] = df['Close'].pct_change().rolling(14).std()
+         latest = df.iloc[-1]
+
+         indicators = (
+             f"\nTechnical Indicators:\n"
+             f"- SMA(10): {latest['SMA_10']:.2f} | EMA(10): {latest['EMA_10']:.2f}\n"
+             f"- RSI(14): {latest['RSI_14']:.2f}\n"
+             f"- MACD: {latest['MACD']:.2f} | Signal: {latest['MACD_Signal']:.2f}\n"
+             f"- Bollinger Bands: Upper={latest['BB_Upper']:.2f}, Lower={latest['BB_Lower']:.2f}\n"
+             f"- ATR(14): {latest['ATR_14']:.2f}\n"
+             f"- Volatility (14-day): {latest['Volatility']:.4f}"
+         )
+
+         output = (
+             f"{ticker.upper()} Summary:\n"
+             f"- Current Price: ${current_price}\n"
+             f"- Market Cap: {market_cap}\n"
+             f"- Sector: {sector} | Industry: {industry}\n"
+             f"- PE Ratio: {pe_ratio}\n"
+             f"{daily_info}"
+             f"{month_summary}"
+             f"{quarter_summary}"
+             f"{indicators}"
+             f"\n\nCompany Overview:\n{summary}"
+         )
+
+         return output
+
+     except Exception as e:
+         return f"Error fetching stock data for {company_name}: {str(e)}"
+
+ @tool(args_schema=WebSearchInput)
+ def web_search(query: str) -> str:
+     """
+     Search the internet for anything. A longer, more detailed query gives a higher-quality result.
+     Args: query: Search query.
+     """
+     sleep(1)
+     try:
+         wrapper = DuckDuckGoSearchAPIWrapper(max_results=2)
+         search = DuckDuckGoSearchResults(api_wrapper=wrapper)
+         return search.invoke(query)
+     except Exception:
+         return "Error in running the tool."
+
+
+ @tool(args_schema=StandardNewsSearchProtocol)
+ def tech_news(topic: str) -> str:
+     """
+     Fetches recent UK-based technology news headlines and descriptions from NewsData.io
+     with a focus on the given topic (matched in the article title).
+
+     Args:
+         topic (str): The keyword to search for in technology news article titles.
+
+     Returns:
+         str: A concatenated string of news summaries with topic-specific tech news.
+     """
+     sleep(1)
+     try:
+         client = NewsDataApiClient(apikey=Onews_api,
+                                    debug=True,
+                                    folder_path="./news_output")
+         content = client.latest_api(category="technology", language="en", country="gb", size=3, qInTitle=topic)
+         content = content['results']
+         news_text = ""
+         for i, article in enumerate(content):
+             news_text += f"tech_news {i+1}: " + article["description"] + "\n"
+         return news_text
+     except Exception:
+         return "There was an error. Can't run the tool."
+
+
+ @tool(args_schema=StandardNewsSearchProtocol)
+ def politics_news(topic: str) -> str:
+     """
+     Fetches recent UK-based politics news headlines and descriptions from NewsData.io
+     with a focus on the given topic (matched in the article title).
+
+     Args:
+         topic (str): The keyword to search for in politics news article titles.
+
+     Returns:
+         str: A concatenated string of news summaries with topic-specific political news.
+     """
+     sleep(1)
+     try:
+         client = NewsDataApiClient(apikey=Onews_api,
+                                    debug=True,
+                                    folder_path="./news_output")
+         content = client.latest_api(category="politics", language="en", country="gb", size=3, qInTitle=topic)
+         content = content['results']
+         news_text = ""
+         for i, article in enumerate(content):
+             news_text += f"politics_news {i+1}: " + article["description"] + "\n"
+         return news_text
+     except Exception:
+         return "There was an error. Can't run the tool."
+
+
+ @tool(args_schema=StandardNewsSearchProtocol)
+ def business_news(topic: str) -> str:
+     """
+     Fetches recent UK-based business news headlines and descriptions from NewsData.io
+     with a focus on the given topic (matched in the article title).
+
+     Args:
+         topic (str): The keyword to search for in business news article titles.
+
+     Returns:
+         str: A concatenated string of news summaries with topic-specific business news.
+     """
+     sleep(1)
+     try:
+         client = NewsDataApiClient(apikey=Onews_api,
+                                    debug=True,
+                                    folder_path="./news_output")
+         content = client.latest_api(category="business", language="en", country="gb", size=3, qInTitle=topic)
+         content = content['results']
+         news_text = ""
+         for i, article in enumerate(content):
+             news_text += f"business_news {i+1}: " + article["description"] + "\n"
+         return news_text
+     except Exception:
+         return "There was an error. Can't run the tool."
+
+
+ @tool(args_schema=StandardNewsSearchProtocol)
+ def world_news(topic: str) -> str:
+     """
+     Fetches recent world news headlines related to the UK and descriptions from NewsData.io
+     with a focus on the given topic (matched in the article title).
+
+     Args:
+         topic (str): The keyword to search for in world news article titles.
+
+     Returns:
+         str: A concatenated string of news summaries with topic-specific world news.
+     """
+     sleep(1)
+     try:
+         client = NewsDataApiClient(apikey=Onews_api,
+                                    debug=True,
+                                    folder_path="./news_output")
+         content = client.latest_api(category="world", language="en", country="gb", size=3, qInTitle=topic)
+         content = content['results']
+         news_text = ""
+         for i, article in enumerate(content):
+             news_text += f"world_news {i+1}: " + article["description"] + "\n"
+         return news_text
+     except Exception:
+         return "There was an error. Can't run the tool."
+
+ # The @tool-decorated functions above are already LangChain tools, so the agent-facing
+ # Tool objects below are built from their underlying callables (.func) and simply add
+ # routing names and descriptions for the supervisor's agents.
+ stock_data_tool = Tool(
+     name="Stock Market Data",
+     func=fetch_stock_summary.func,
+     description=(
+         "Use this tool to get current stock market data like price, market cap, "
+         "and historical trend for a specific company (e.g., Apple, Nvidia, Tesla); "
+         "the matching ticker is resolved automatically."
+     )
+ )
+
+ web_search = Tool(
+     name="Web Search",
+     func=web_search.func,
+     description=(
+         "Use this tool to search the internet for general information about the stock. "
+         "It takes a query and returns the results. For high-quality results, provide a "
+         "detailed query with as much context as possible."
+     )
+ )
+
+ # Tool passes a single query string; the remaining Reddit arguments fall back to the
+ # defaults declared in the function signature.
+ reddit_search_tool = Tool(
+     name="Reddit Search",
+     func=reddit_search_tool.func,
+     description=(
+         "Use this tool to search Reddit for recent discussions and sentiment about a stock, event, or topic. "
+         "Input should be a search query (e.g., 'Do you like Tesla?', 'What do you think about Tesla products?', 'Tesla is a scam'). "
+         "Args: query (str): the search query (e.g., 'Tesla stock'); sort (str): sort order ('new', 'hot', etc.), defaults to 'new'; "
+         "time_filter (str): time range ('hour', 'day', 'week', 'month', 'year', 'all'), defaults to 'week'; "
+         "subreddit (str): subreddit to search (e.g., 'stocks'); limit (str): maximum number of results, defaults to '5'."
+     )
+ )
+
+
+ tech_news_tool = Tool(
+     name="Technology News Search",
+     func=tech_news.func,
+     description="Use this tool to get the latest technology news articles from the UK that match a topic (e.g., AI, robotics, fintech, Apple, Meta, Tesla)."
+ )
+
+
+ politics_news_tool = Tool(
+     name="Politics News Search",
+     func=politics_news.func,
+     description="Use this tool to get the latest political news articles from the UK that match a topic (e.g., AI, robotics, fintech, Apple, Meta, Tesla)."
+ )
+
+ business_news_tool = Tool(
+     name="Business News Search",
+     func=business_news.func,
+     description="Use this tool to get the latest business news articles from the UK that match a topic (e.g., AI, robotics, fintech, Apple, Meta, Tesla)."
+ )
+
+ world_news_tool = Tool(
+     name="World News Search",
+     func=world_news.func,
+     description="Use this tool to get the latest world (geopolitical) news articles from the UK that match a topic (e.g., AI, robotics, fintech, Apple, Meta, Tesla)."
+ )
+
+
+ def get_tools():
+     return [
+         stock_data_tool,
+         reddit_search_tool,
+         web_search,
+         tech_news_tool,
+         business_news_tool,
+         politics_news_tool,
+         world_news_tool
+     ]
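
Because every function above is decorated with @tool (and then re-exposed through the Tool wrappers), each one can be smoke-tested outside the agent graph. A rough check, assuming network access and, for the news and Reddit tools, the NEWS_API and Reddit credentials in the environment:

import custom_tools

# Resolves the ticker via DuckDuckGo, then pulls fundamentals and indicators from yfinance.
print(custom_tools.fetch_stock_summary.invoke({"company_name": "Tesla"}))

# The Tool wrappers accept a single query string.
print(custom_tools.web_search.run("Tesla Q2 earnings reaction"))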
requirements.txt ADDED
@@ -0,0 +1,12 @@
+ langchain
+ langchain-community
+ langgraph
+ langchain-groq
+ langgraph-supervisor
+ python-dotenv
+ yfinance
+ duckduckgo-search
+ praw
+ newsdataapi
+ langmem
+ gradio
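
The dependencies are left unpinned, so pip install -r requirements.txt pulls the latest release of each package; pandas, which custom_tools.py imports directly, is not listed but arrives as a dependency of yfinance.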