File size: 3,015 Bytes
ce0ec3b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a8e40a3
ce0ec3b
92894aa
23f553c
 
 
 
 
 
 
 
 
 
 
 
 
ce0ec3b
23f553c
 
 
 
 
ce0ec3b
0e9bb01
 
 
 
ce0ec3b
 
 
 
0e9bb01
ce0ec3b
 
 
 
 
 
 
 
 
 
 
 
 
652f14d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
"""
Configuration parameters for the Lyrics Analyzer Agent.

This module separates configuration from implementation,
making it easier to modify settings without changing code.
"""

import os
import yaml
from loguru import logger

# Logger configuration
def setup_logger():
    """Configure the loguru logger with custom console formatting.

    Removes loguru's default handler and installs a single INFO-level
    sink that writes through print() with a colorized timestamp/level
    layout.
    """
    # Drop loguru's default stderr handler so only our sink is active.
    logger.remove()
    # Timestamp | padded level | message, with loguru color markup.
    log_format = (
        "<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
        "<level>{level: <8}</level> | <cyan>{message}</cyan>"
    )
    logger.add(lambda msg: print(msg, end=""), level="INFO", format=log_format)

# API configuration
def load_api_keys():
    """Load API keys from environment variables.

    Gemini is the default provider. The previous implementation wrote
    ``os.getenv("GEMINI_API_KEY")`` straight back into ``os.environ``,
    which raises ``TypeError`` when the variable is unset (environ
    values must be strings). Now the write-back only happens when the
    key exists; otherwise a warning is logged instead of crashing.
    """
    api_key = os.getenv("GEMINI_API_KEY")
    if api_key is not None:
        os.environ["GEMINI_API_KEY"] = api_key
    else:
        logger.warning("GEMINI_API_KEY is not set; Gemini provider may be unavailable.")
    

def get_model_id(use_local=True, provider="gemini"):
    """Get the appropriate model ID based on configuration.

    Args:
        use_local: If True, use test configuration (local development).
               If False, use production configuration. Only consulted
               when *provider* is not one of the known providers.
        provider: Model provider ('ollama', 'gemini', 'openrouter')

    Returns:
        String with model ID for the specified provider.
    """
    known_models = {
        "ollama": "ollama/gemma3:4b",  # local Ollama running Gemma 3 4B
        "gemini": "gemini/gemini-2.0-flash",
        # Free Gemini 2.0 Flash Lite preview routed through OpenRouter.
        "openrouter": "openrouter/google/gemini-2.0-flash-lite-preview-02-05:free",
    }
    # Unknown provider: fall back to the local or production default.
    fallback = known_models["ollama"] if use_local else known_models["gemini"]
    return known_models.get(provider, fallback)

def get_ollama_api_base():
    """Return the base URL of the local Ollama HTTP API."""
    # Ollama's default port on localhost.
    return "http://localhost:11434"

# Load prompts from YAML
def load_prompt_templates():
    """Load prompt templates from the YAML file.

    Returns:
        Dict of prompt templates parsed from ``prompts/prompts_hf.yaml``,
        or an empty dict when the file is missing or contains invalid
        YAML (the failure is logged rather than raised so the app keeps
        running).
    """
    prompts_path = "prompts/prompts_hf.yaml"
    try:
        with open(prompts_path, 'r') as stream:
            return yaml.safe_load(stream)
    except (FileNotFoundError, yaml.YAMLError) as e:
        # Bug fix: the log message previously named "prompts.yaml",
        # which is not the file actually opened above.
        logger.error(f"Error loading {prompts_path}: {e}")
        return {}  # Return empty dict to avoid breaking the application

# Tool configuration
# Delay window between consecutive search requests — presumably seconds,
# used for rate limiting; confirm against the search tool implementation.
SEARCH_TOOL_CONFIG = dict(
    min_delay=3.0,
    max_delay=7.0,
)


# Gradio UI configuration
def get_gradio_config(is_test=True):
    """Get the appropriate Gradio UI configuration based on environment.

    Args:
        is_test: If True, use test configuration (local development).
               If False, use production configuration (HuggingFace).

    Returns:
        Dictionary with Gradio configuration parameters.
    """
    # Settings common to both environments.
    config = {
        "debug": True,
        "share": False,
    }
    if is_test:
        # Local development: bind to an explicit host/port so the UI
        # is reachable at a predictable address.
        config["server_name"] = "127.0.0.1"
        config["server_port"] = 3000
    # Production (HuggingFace) must omit server_name/server_port.
    return config