Update app.py
app.py
CHANGED
@@ -11,60 +11,42 @@ from urllib.parse import urlparse
import mimetypes
import subprocess # For yt-dlp

-from huggingface_hub import get_space_runtime
+from huggingface_hub import get_space_runtime # Keep for agent_code_url, though not for username

# --- Global Variables for Startup Status ---
-# These will be populated in __main__ and accessed by the demo.load function
missing_vars_startup_list_global = []
agent_pre_init_status_msg_global = "Agent status will be determined at startup."

+# File Processing Libs
+try: from PyPDF2 import PdfReader; PYPDF2_AVAILABLE = True
+except ImportError: PYPDF2_AVAILABLE = False
+try: from PIL import Image; import pytesseract; PIL_TESSERACT_AVAILABLE = True
+except ImportError: PIL_TESSERACT_AVAILABLE = False
+try: import whisper; WHISPER_AVAILABLE = True
+except ImportError: WHISPER_AVAILABLE = False

-#
-try:
-    from PyPDF2 import PdfReader
-    PYPDF2_AVAILABLE = True
-except ImportError:
-    PYPDF2_AVAILABLE = False
-
-try:
-    from PIL import Image
-    import pytesseract
-    PIL_TESSERACT_AVAILABLE = True
-except ImportError:
-    PIL_TESSERACT_AVAILABLE = False
-
-try:
-    import whisper
-    WHISPER_AVAILABLE = True
-except ImportError:
-    WHISPER_AVAILABLE = False
-
-# Google Generative AI types
+# Google GenAI
from google.generativeai.types import HarmCategory, HarmBlockThreshold
-
-# LangChain Core components
+# LangChain
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage, ToolMessage, AnyMessage
from langchain.prompts import PromptTemplate
from langchain.tools import BaseTool, tool as lc_tool_decorator
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.agents import AgentExecutor, create_react_agent
-
-# Prebuilt Tools
from langchain_community.tools import DuckDuckGoSearchRun
from langchain_experimental.tools import PythonREPLTool

-#
+# LangGraph Conditional Imports
if TYPE_CHECKING:
    from langgraph.graph import StateGraph as StateGraphAliasedForHinting
    from langgraph.prebuilt import ToolExecutor as ToolExecutorAliasedForHinting
    from typing_extensions import TypedDict
    from langgraph.checkpoint.base import BaseCheckpointSaver

-# LangGraph Imports
LANGGRAPH_FLAVOR_AVAILABLE = False
LG_StateGraph: Optional[Type[Any]] = None
LG_ToolExecutor: Optional[Type[Any]] = None
-LG_END: Optional[Any] = None
+LG_END: Optional[Any] = None # Must be imported if used
LG_ToolInvocation: Optional[Type[Any]] = None
add_messages: Optional[Any] = None
MemorySaver_Class: Optional[Type[Any]] = None
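The commit collapses each optional-dependency import into a one-line `try`/`except` that records availability in a module-level flag, which the rest of app.py checks before wiring up the corresponding tool. A minimal standalone sketch of that pattern; `build_tool_names` and the `"read_pdf"` label are illustrative placeholders, not identifiers from the Space:

```python
# Optional-dependency pattern: attempt the import, record the outcome in a flag,
# and let later code consult the flag instead of re-importing.
try:
    from PyPDF2 import PdfReader
    PYPDF2_AVAILABLE = True
except ImportError:
    PYPDF2_AVAILABLE = False

def build_tool_names() -> list:
    """Return only the tools whose backing libraries imported successfully."""
    names = []
    if PYPDF2_AVAILABLE:
        names.append("read_pdf")  # placeholder label, not the Space's tool name
    return names

print(build_tool_names())
```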
@@ -75,7 +57,7 @@ LLM_INSTANCE: Optional[ChatGoogleGenerativeAI] = None
LANGGRAPH_MEMORY_SAVER: Optional[Any] = None

try:
-    from langgraph.graph import StateGraph, END #
+    from langgraph.graph import StateGraph, END # Import END here
    from langgraph.prebuilt import ToolExecutor, ToolInvocation as LGToolInvocationActual
    from langgraph.graph.message import add_messages as lg_add_messages
    from langgraph.checkpoint.memory import MemorySaver as LGMemorySaver
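The hunk above only shows the imports inside the `try`; the module-level `LG_*` placeholders and `LANGGRAPH_FLAVOR_AVAILABLE` defined earlier are presumably rebound when the block succeeds. A plausible sketch of that wiring, assuming the assignments live in the same `try`/`except`:

```python
# Guarded LangGraph import: bind the real symbols when the package is installed,
# otherwise leave the placeholders as None and fall back to the ReAct agent.
from typing import Any, Optional, Type

LANGGRAPH_FLAVOR_AVAILABLE = False
LG_StateGraph: Optional[Type[Any]] = None
LG_END: Optional[Any] = None

try:
    from langgraph.graph import StateGraph, END
    LG_StateGraph, LG_END = StateGraph, END
    LANGGRAPH_FLAVOR_AVAILABLE = True
except ImportError:
    pass  # app.py logs this at startup and builds a ReAct agent instead

print("LangGraph available:", LANGGRAPH_FLAVOR_AVAILABLE)
```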
@@ -98,8 +80,7 @@ WHISPER_MODEL: Optional[Any] = None

# --- Environment Variables & API Keys ---
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
-HUGGINGFACE_TOKEN = os.environ.get("HF_TOKEN")
-HF_USERNAME_FOR_SUBMISSION = os.environ.get("HF_USERNAME")
+HUGGINGFACE_TOKEN = os.environ.get("HF_TOKEN") # For GAIA API auth, NOT for username

# --- Setup Logging ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(name)s - %(module)s:%(lineno)d - %(message)s')
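The new comment clarifies that `HF_TOKEN` authenticates the GAIA API calls rather than identifying the user. The submit call later spreads an `auth_h` dict into the request headers; its construction is not visible in this diff, so the bearer-token shape below is only an assumption:

```python
import os

GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
HUGGINGFACE_TOKEN = os.environ.get("HF_TOKEN")

# Assumed shape of the `auth_h` headers dict used by the submit request later in app.py.
auth_h = {"Authorization": f"Bearer {HUGGINGFACE_TOKEN}"} if HUGGINGFACE_TOKEN else {}
print("Auth header present:", bool(auth_h))
```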
@@ -205,7 +186,7 @@ def _download_file(file_identifier: str, task_id_for_file: Optional[str] = None)
    name_without_ext, current_ext = os.path.splitext(effective_save_path)
    if not current_ext:
        content_type_header = r.headers.get('content-type', '')
-        content_type_val = content_type_header.split(';').strip() if content_type_header else ''
+        content_type_val = content_type_header.split(';')[0].strip() if content_type_header else ''
        if content_type_val:
            guessed_ext = mimetypes.guess_extension(content_type_val)
            if guessed_ext: effective_save_path += guessed_ext; logger.info(f"Added guessed ext: {guessed_ext}")
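The `[0]` index matters because `Content-Type` headers often carry parameters such as `; charset=utf-8`, which `mimetypes.guess_extension` does not understand. A small self-contained check of the behaviour:

```python
import mimetypes

def guess_ext(content_type_header: str) -> str:
    """Drop header parameters (e.g. '; charset=utf-8') before guessing an extension."""
    content_type_val = content_type_header.split(';')[0].strip() if content_type_header else ''
    return mimetypes.guess_extension(content_type_val) or ''

print(guess_ext('application/pdf'))            # '.pdf'
print(guess_ext('text/plain; charset=utf-8'))  # '.txt'
print(guess_ext(''))                           # '' (no header, no guess)
```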
@@ -397,15 +378,24 @@ def construct_prompt_for_agent(q: Dict[str,Any]) -> str:
    return f"Task ID:{tid}{level}{files_info}\n\nQuestion:{q_str}"

# --- Main Submission Logic ---
-def run_and_submit_all():
-    global AGENT_INSTANCE
-    space_id
-
-
-    if
-
-
-
+def run_and_submit_all(profile: Optional[gr.OAuthProfile] = None): # Re-added profile as per Gradio standard
+    global AGENT_INSTANCE
+    space_id = os.getenv("SPACE_ID")
+    username_for_submission = None
+
+    if profile and hasattr(profile, 'username') and profile.username:
+        username_for_submission = profile.username
+        logger.info(f"Username from OAuth profile: {username_for_submission}")
+    else:
+        # This case means user is not logged in via HF Login Button, or OAuth is not configured for the Space
+        logger.warning("OAuth profile not available or username missing. Submission might fail or be attributed to a default/fallback if allowed by API.")
+        # As per strict template, we should stop if no profile.
+        return "Hugging Face login required. Please use the login button and try again.", None
+
+    if AGENT_INSTANCE is None:
+        try: logger.info("Agent not pre-initialized. Initializing now for run..."); initialize_agent_and_tools()
+        except Exception as e: return f"Agent on-demand initialization failed: {e}", None
+        if AGENT_INSTANCE is None: return "Agent is still None after on-demand initialization attempt.", None

    agent_code=f"https://huggingface.co/spaces/{space_id}/tree/main" if space_id else "local_dev"
    q_url,s_url=f"{DEFAULT_API_URL}/questions",f"{DEFAULT_API_URL}/submit"
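The `profile: Optional[gr.OAuthProfile]` parameter leans on Gradio's Hugging Face OAuth support: when a `gr.LoginButton` is present in the Blocks app and the user is signed in, Gradio injects the profile into event handlers that declare such a parameter, and passes `None` otherwise. A minimal sketch of that mechanism, separate from app.py:

```python
from typing import Optional
import gradio as gr

def whoami(profile: Optional[gr.OAuthProfile] = None) -> str:
    # Gradio fills `profile` for logged-in users when a LoginButton is present.
    if profile and profile.username:
        return f"Submitting as {profile.username}"
    return "Hugging Face login required. Please use the login button and try again."

with gr.Blocks() as demo:
    gr.LoginButton()
    status = gr.Textbox(label="Login status")
    gr.Button("Check login").click(fn=whoami, outputs=status)

if __name__ == "__main__":
    demo.launch()
```

Outside a Space with OAuth configured, `profile` stays `None`, which is exactly the branch the commit turns into a hard "login required" return.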
@@ -418,7 +408,7 @@ def run_and_submit_all():
    except Exception as e:logger.error(f"Fetch questions error: {e}",exc_info=True);return f"Fetch questions error:{e}",None

    res_log,ans_payload=[],[]
-    logger.info(f"Running agent on {len(q_data)} questions for user '{
+    logger.info(f"Running agent on {len(q_data)} questions for user '{username_for_submission}'...")
    for i,item in enumerate(q_data):
        tid,q_txt=item.get("task_id"),item.get("question")
        if not tid or q_txt is None:logger.warning(f"Skipping item: {item}");continue
@@ -434,12 +424,12 @@ def run_and_submit_all():
            res_log.append({"Task ID":tid,"Question":q_txt,"Full Agent Prompt":prompt,"Raw Agent Output":err_ans,"Submitted Answer":"N/A [AGENT_ERROR]"})

    if not ans_payload:return "Agent no answers.",pd.DataFrame(res_log)
-    sub_data={"username":
-    logger.info(f"Submitting {len(ans_payload)} answers to {s_url} for user '{
+    sub_data={"username":username_for_submission.strip(),"agent_code":agent_code,"answers":ans_payload}
+    logger.info(f"Submitting {len(ans_payload)} answers to {s_url} for user '{username_for_submission}'...")
    sub_h={"Content-Type":"application/json",**auth_h}
    try:
        r=requests.post(s_url,json=sub_data,headers=sub_h,timeout=120);r.raise_for_status();res_data=r.json()
-        msg=(f"User:{res_data.get('username',
+        msg=(f"User:{res_data.get('username',username_for_submission)}\nScore:{res_data.get('score','N/A')}% ({res_data.get('correct_count','?')}/{res_data.get('total_attempted','?')})\nMsg:{res_data.get('message','N/A')}")
        logger.info(f"Submission OK! {msg}");return f"Submission OK!\n{msg}",pd.DataFrame(res_log,columns=["Task ID","Question","Full Agent Prompt","Raw Agent Output","Submitted Answer"])
    except requests.exceptions.HTTPError as e:
        err=f"HTTP {e.response.status_code}. Detail:{e.response.text[:200]}"; logger.error(f"Submit Fail:{err}",exc_info=True); return f"Submit Fail:{err}",pd.DataFrame(res_log)
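For reference, the submission payload fields visible in this hunk are `username`, `agent_code`, and `answers`. A hedged sketch of an equivalent request; the endpoint, the auth header, and the per-answer keys are placeholders because they are defined elsewhere in app.py:

```python
import requests

DEFAULT_API_URL = "https://example.invalid/api"                  # placeholder endpoint
auth_h = {"Authorization": "Bearer <HF_TOKEN>"}                  # assumed header shape
ans_payload = [{"task_id": "abc123", "submitted_answer": "42"}]  # per-answer keys assumed

sub_data = {
    "username": "some-hf-user",
    "agent_code": "https://huggingface.co/spaces/<space_id>/tree/main",
    "answers": ans_payload,
}
r = requests.post(f"{DEFAULT_API_URL}/submit", json=sub_data,
                  headers={"Content-Type": "application/json", **auth_h}, timeout=120)
r.raise_for_status()
print(r.json())
```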
@@ -447,24 +437,28 @@ def run_and_submit_all():

# --- Build Gradio Interface ---
with gr.Blocks(css=".gradio-container {max-width:1280px !important;margin:auto !important;}",theme=gr.themes.Soft()) as demo:
-    gr.Markdown("# GAIA Agent Challenge Runner
+    gr.Markdown("# GAIA Agent Challenge Runner v7 (OAuth for Username)")
    gr.Markdown(f"""**Instructions:**
-    1.
+    1. **Login with Hugging Face** using the button below. Your HF username will be used for submission.
    2. Click 'Run Evaluation & Submit' to process GAIA questions (typically 20).
    3. **Goal: 30%+ (6/20).** Agent uses Gemini 2.5 Pro ({GEMINI_MODEL_NAME}), Web Search, Python, PDF, OCR, Audio/YouTube.
-    4.
+    4. Ensure `GOOGLE_API_KEY` and `HUGGINGFACE_TOKEN` are Space secrets.
+    5. Check Space logs for details. LangGraph is attempted (ReAct fallback).""")

    agent_status_display = gr.Markdown("**Agent Status:** Initializing...")
    missing_secrets_display = gr.Markdown("")

+    gr.LoginButton() # Added back as per template standard for username
    run_button = gr.Button("Run Evaluation & Submit All Answers")
    status_output = gr.Textbox(label="Run Status / Submission Result", lines=7, interactive=False)
    results_table = gr.DataFrame(label="Q&A Log", headers=["Task ID","Question","Prompt","Raw","Submitted"], wrap=True)
+
+    # The `profile` argument in `run_and_submit_all` will be populated by Gradio
+    # if the user is logged in via the `gr.LoginButton()` flow.
    run_button.click(fn=run_and_submit_all, outputs=[status_output,results_table], api_name="run_evaluation")

    def update_ui_on_load_fn_within_context():
-
-        global missing_vars_startup_list_global, agent_pre_init_status_msg_global
+        global missing_vars_startup_list_global, agent_pre_init_status_msg_global
        secrets_msg_md = ""
        if missing_vars_startup_list_global:
            secrets_msg_md = f"<font color='red'>**⚠️ Secrets Missing:** {', '.join(missing_vars_startup_list_global)}.</font>"
@@ -485,18 +479,17 @@ with gr.Blocks(css=".gradio-container {max-width:1280px !important;margin:auto !
    demo.load(update_ui_on_load_fn_within_context, [], [agent_status_display, missing_secrets_display])

if __name__ == "__main__":
-    logger.info("Application starting up (
+    logger.info("Application starting up (v7)...")
    if not PYPDF2_AVAILABLE: logger.warning("PyPDF2 (PDF tool) NOT AVAILABLE.")
    if not PIL_TESSERACT_AVAILABLE: logger.warning("Pillow/Pytesseract (OCR tool) NOT AVAILABLE.")
    if not WHISPER_AVAILABLE: logger.warning("Whisper (Audio tool) NOT AVAILABLE.")
    if LANGGRAPH_FLAVOR_AVAILABLE: logger.info("Core LangGraph (StateGraph, END) loaded.")
    else: logger.warning("Core LangGraph FAILED import. ReAct fallback. Check requirements.txt (langgraph, langchain-core, typing-extensions) & Space build logs for errors.")

-
-    missing_vars_startup_list_global.clear() # Clear in case of script reload in some environments
+    missing_vars_startup_list_global.clear()
    if not GOOGLE_API_KEY: missing_vars_startup_list_global.append("GOOGLE_API_KEY")
-    if not HUGGINGFACE_TOKEN: missing_vars_startup_list_global.append("HUGGINGFACE_TOKEN")
-
+    if not HUGGINGFACE_TOKEN: missing_vars_startup_list_global.append("HUGGINGFACE_TOKEN (for GAIA API)")
+    # HF_USERNAME_FOR_SUBMISSION is no longer checked here as OAuth is primary

    try:
        logger.info("Pre-initializing agent...")