Add prompt_templates
- app.py +9 -4
- pyproject.toml +1 -0
app.py
CHANGED
@@ -1,6 +1,7 @@
 import os
 import time
 import random
+import yaml
 import traceback
 from loguru import logger
 from Gradio_UI import GradioUI
@@ -61,6 +62,8 @@ class LyricsSearchTool(Tool):
         # TODO: Implement lyrics search functionality
         return "Lyrics search not implemented yet"
 
+with open("prompts.yaml", 'r') as stream:
+    prompt_templates = yaml.safe_load(stream)
 
 @tool
 def analyze_lyrics_tool(song_title: str, artist: str, lyrics: str) -> str:
@@ -163,7 +166,6 @@ def _make_api_call_with_retry(model: str, prompt: str) -> str:
 
 # TODO: use DuckDuckGoSearchTool to find related information
 # for explanation in case the LLM itself is not confident or doesn't know
-#
 
 # Check if we need to use Anthropic for local testing
 use_anthropic = os.getenv("USE_ANTHROPIC", "false").lower() == "true"
@@ -183,8 +185,9 @@ web_agent = CodeAgent(
     name="lyrics_search_agent",
     description="Browses the web to find original full lyrics and scrape them. Excels at building effective search queries",
     additional_authorized_imports=["numpy", "bs4"],
-    max_steps=
+    max_steps=50,
     verbosity_level=2,
+    prompt_templates=prompt_templates
 )
 
 
@@ -194,8 +197,9 @@ analysis_agent = CodeAgent(
     name="lyrics_analysis_agent",
     description="You are a Song Analysis Expert with deep knowledge of music theory, lyrical interpretation, cultural contexts, and music history. Your role is to analyze song lyrics to uncover their deeper meaning, artistic significance, and historical context.",
     additional_authorized_imports=["numpy", "bs4"],
-    max_steps=
+    max_steps=5,
     verbosity_level=2,
+    prompt_templates=prompt_templates
 )
 
 
@@ -210,11 +214,12 @@ manager_agent = CodeAgent(
     planning_interval=5,
     verbosity_level=2,
     max_steps=15,
+    prompt_templates=prompt_templates
 )
 
 logger.info("Initializing Gradio UI and launching server")
 GradioUI(manager_agent).launch(
     debug=True, share=True,
-
+    server_name="127.0.0.1", server_port=3000
 )
 logger.success("Server started successfully")
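Note: a minimal, self-contained sketch of how the pieces above fit together, not a verbatim copy of app.py. LiteLLMModel, the model id, and the empty tools list are placeholders (guessed from the litellm dependency and the USE_ANTHROPIC check); only the prompts.yaml loading and the prompt_templates keyword mirror the actual change.

import yaml
from smolagents import CodeAgent, LiteLLMModel

# Load the shared prompt templates once at import time; app.py passes the same
# dict to web_agent, analysis_agent, and manager_agent.
with open("prompts.yaml", "r") as stream:
    prompt_templates = yaml.safe_load(stream)

# Placeholder model: the real app selects its model elsewhere (not shown in this diff).
model = LiteLLMModel(model_id="anthropic/claude-3-5-sonnet-latest")

web_agent = CodeAgent(
    tools=[],  # the real agent receives its search/scrape tools here
    model=model,
    name="lyrics_search_agent",
    description="Browses the web to find original full lyrics and scrape them.",
    additional_authorized_imports=["numpy", "bs4"],
    max_steps=50,
    verbosity_level=2,
    prompt_templates=prompt_templates,
)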
pyproject.toml
CHANGED
@@ -9,5 +9,6 @@ dependencies = [
     "huggingface-hub>=0.29.1",
     "litellm>=1.61.20",
     "loguru>=0.7.3",
+    "pyyaml>=6.0.2",
     "smolagents>=1.9.2",
 ]
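The pyproject.toml entry above exists only to back the new import yaml in app.py. A quick sanity check after installing, assuming prompts.yaml sits next to app.py (its key names are not shown in this diff and depend on the template file the Space ships):

import yaml

# Parse the same file app.py now reads at import time.
with open("prompts.yaml", "r") as stream:
    templates = yaml.safe_load(stream)

# smolagents expects a mapping of template names to template strings;
# which keys are present depends on the prompts.yaml in the repo.
assert isinstance(templates, dict)
print(sorted(templates.keys()))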