tonko22 committed
Commit 8fca8f3 · 1 Parent(s): 1841510

Fix test-env initialisation

Files changed (1):
  1. app.py +11 -8
app.py CHANGED
@@ -6,13 +6,19 @@ This module serves as the entry point for the Lyrics Analyzer application, which
 uses a system of specialized agents to search for and analyze song lyrics.
 """
 import os
+
 from loguru import logger
-from Gradio_UI import GradioUI
 from smolagents import LiteLLMModel
 
-from config import setup_logger, load_api_keys, get_model_id, get_gradio_config
-from agents.manager_agent import create_manager_agent
 from agents.single_agent import create_single_agent
+from config import (
+    get_gradio_config,
+    get_model_id,
+    get_ollama_api_base,
+    load_api_keys,
+    setup_logger,
+)
+from Gradio_UI import GradioUI
 
 
 def main():
@@ -28,13 +34,11 @@ def main():
 
     is_test = os.environ.get('SPACE_ID') is None
 
+    # If using Ollama, we need to specify the API base URL
     # Initialize the LLM model based on configuration
    model_id = get_model_id(is_test=is_test)
    logger.info(f"Initializing with model: {model_id}")
-
-    # If using Ollama, we need to specify the API base URL
-    if "ollama" in model_id:
-        from config import get_ollama_api_base
+    if is_test:
         api_base = get_ollama_api_base()
         logger.info(f"Using Ollama API base: {api_base}")
         model = LiteLLMModel(model_id=model_id, api_base=api_base)
@@ -50,7 +54,6 @@ def main():
     # Determine if we're in test mode (local) or production (HuggingFace)
     # HuggingFace environment has SPACE_ID environment variable
 
-
     gradio_config = get_gradio_config(is_test)
 
     # Launch with appropriate configuration
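
For readers skimming the diff, the resulting model-initialization path after this commit can be summarised by the sketch below. It is reconstructed from the changed lines shown above; the build_model helper name and the production branch (a plain LiteLLMModel with no custom api_base) are illustrative assumptions, since that branch falls outside the changed hunks.

import os

from loguru import logger
from smolagents import LiteLLMModel

from config import get_model_id, get_ollama_api_base


def build_model() -> LiteLLMModel:
    # Local runs have no SPACE_ID variable; HuggingFace Spaces set it.
    is_test = os.environ.get('SPACE_ID') is None

    model_id = get_model_id(is_test=is_test)
    logger.info(f"Initializing with model: {model_id}")

    if is_test:
        # Test environment: route LiteLLM through the local Ollama server.
        api_base = get_ollama_api_base()
        logger.info(f"Using Ollama API base: {api_base}")
        return LiteLLMModel(model_id=model_id, api_base=api_base)

    # Production branch (assumed): hosted model, no custom API base needed.
    return LiteLLMModel(model_id=model_id)

The actual fix is the guard change: instead of string-matching the model id ("ollama" in model_id), the code now branches on the explicit is_test flag, so a local test environment always gets the Ollama API base regardless of how the model id is spelled.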