"""
Configuration parameters for the Lyrics Analyzer Agent.
This module separates configuration from implementation,
making it easier to modify settings without changing code.
"""
import os
import yaml
from loguru import logger


# Logger configuration
def setup_logger():
    """Configure loguru logger with custom formatting."""
    logger.remove()  # Remove default handlers
    logger.add(
        lambda msg: print(msg, end=""),
        level="INFO",
        format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <cyan>{message}</cyan>"
    )
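
# Usage sketch (assumption: called once at application startup, e.g. from the
# app entry point, before anything else logs):
#
#     from config import setup_logger
#     setup_logger()
#     logger.info("Lyrics Analyzer Agent starting up")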


# API configuration
def load_api_keys():
    """Load API keys from environment variables."""
    # Gemini API is the default; os.environ rejects None, so guard a missing key.
    gemini_key = os.getenv("GEMINI_API_KEY")
    if gemini_key:
        os.environ["GEMINI_API_KEY"] = gemini_key
    else:
        logger.warning("GEMINI_API_KEY is not set; Gemini models will be unavailable.")
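
# Usage sketch (assumption: invoked once at startup before the agent is built;
# the key itself is supplied externally, e.g. as a HuggingFace Space secret):
#
#     from config import load_api_keys
#     load_api_keys()
#     assert os.environ.get("GEMINI_API_KEY"), "Gemini key missing"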


def get_model_id(use_local=True, provider="gemini"):
    """Get the appropriate model ID based on configuration.

    Args:
        use_local: Only used as a fallback when the provider is unknown.
            If True, fall back to the local Ollama model;
            if False, fall back to Gemini.
        provider: Model provider ('ollama', 'gemini', 'openrouter').

    Returns:
        String with the model ID for the specified provider.
    """
    if provider == "ollama":
        return "ollama/gemma3:4b"  # Local Ollama running Gemma 3 4B
    elif provider == "gemini":
        return "gemini/gemini-2.0-flash"
    elif provider == "openrouter":
        # Gemini 2.0 Flash Lite (free tier) served via OpenRouter
        return "openrouter/google/gemini-2.0-flash-lite-preview-02-05:free"
    else:
        # Default fallback for unknown providers
        return "ollama/gemma3:4b" if use_local else "gemini/gemini-2.0-flash"


def get_ollama_api_base():
    """Get the API base URL for Ollama."""
    return "http://localhost:11434"
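
# Usage sketch (assumption: the agent resolves these IDs through a
# LiteLLM-compatible client; the real call site lives outside this module):
#
#     import litellm
#     response = litellm.completion(
#         model=get_model_id(use_local=False, provider="gemini"),
#         messages=[{"role": "user", "content": "Summarize these lyrics..."}],
#     )
#     # For the local Ollama provider, also pass the endpoint:
#     #     api_base=get_ollama_api_base()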


# Load prompts from YAML
def load_prompt_templates():
    """Load prompt templates from YAML file."""
    try:
        with open("prompts/prompts_hf.yaml", 'r') as stream:
            return yaml.safe_load(stream)
    except (FileNotFoundError, yaml.YAMLError) as e:
        logger.error(f"Error loading prompts/prompts_hf.yaml: {e}")
        return {}  # Return empty dict to avoid breaking the application
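
# Expected file shape (hypothetical keys, shown only to illustrate that
# load_prompt_templates() returns a plain dict of templates):
#
#     system_prompt: |
#       You are a lyrics analysis assistant...
#     final_answer_prompt: |
#       ...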


# Tool configuration
SEARCH_TOOL_CONFIG = {
    "min_delay": 3.0,
    "max_delay": 7.0
}
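
# Usage sketch (assumption: the search tool sleeps a random duration, in
# seconds, between requests to avoid rate limiting):
#
#     import random, time
#     time.sleep(random.uniform(SEARCH_TOOL_CONFIG["min_delay"],
#                               SEARCH_TOOL_CONFIG["max_delay"]))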


# Gradio UI configuration
def get_gradio_config(is_test=True):
    """Get the appropriate Gradio UI configuration based on environment.

    Args:
        is_test: If True, use test configuration (local development).
            If False, use production configuration (HuggingFace).

    Returns:
        Dictionary with Gradio configuration parameters.
    """
    if is_test:
        # Configuration for local development/testing
        return {
            "debug": True,
            "share": False,
            "server_name": "127.0.0.1",
            "server_port": 3000
        }
    else:
        # Configuration for production (HuggingFace)
        return {
            "debug": True,
            "share": False
            # No server_name or server_port for HuggingFace deployment
        }
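
# Usage sketch (assumption: a Gradio Blocks/Interface object named `demo` is
# built elsewhere, e.g. in the app entry point; the local-vs-Space check via
# SPACE_ID is illustrative only):
#
#     is_test = os.getenv("SPACE_ID") is None
#     demo.launch(**get_gradio_config(is_test=is_test))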