import os, threading
import gradio as gr
from crew import run_crew
from utils import get_questions

# Module-level lock so concurrent requests don't clobber each other's keys
# while the process environment is being mutated.
_ENV_LOCK = threading.Lock()


def _run(question, openai_api_key, gemini_api_key, file_name=""):
    """
    Run GAIA General AI Assistant to answer a question.

    Args:
        question (str): The question to answer
        openai_api_key (str): OpenAI API key
        gemini_api_key (str): Gemini API key
        file_name (str): Optional file name

    Returns:
        str: The answer to the question
    """
    if not question:
        raise gr.Error("Question is required.")
    if not openai_api_key:
        raise gr.Error("OpenAI API Key is required.")
    if not gemini_api_key:
        raise gr.Error("Gemini API Key is required.")

    if file_name:
        file_name = f"data/{file_name}"

    with _ENV_LOCK:
        answer = ""
        try:
            os.environ["OPENAI_API_KEY"] = openai_api_key
            os.environ["GEMINI_API_KEY"] = gemini_api_key
            answer = run_crew(question, file_name)
        except Exception as e:
            raise gr.Error(str(e))
        finally:
            os.environ.pop("OPENAI_API_KEY", None)
            os.environ.pop("GEMINI_API_KEY", None)

    return answer
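
# Example (sketch): calling _run directly, outside the UI. The question and
# both key values below are placeholders, not real data or credentials:
#
#     answer = _run(
#         "What is 2 + 2?",
#         openai_api_key="sk-...",
#         gemini_api_key="...",
#     )
#     print(answer)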

gr.close_all()

with gr.Blocks() as gaia:
    gr.Markdown("## General AI Assistant 🧠")
    gr.Markdown(os.environ.get("DESCRIPTION"))
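    # Two-column layout: question, metadata, and API keys on the left,
    # the answer on the right.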
    with gr.Row():
        with gr.Column(scale=3):
            with gr.Row():
                question = gr.Textbox(
                    label="Question *"
                )
            with gr.Row():
                level = gr.Radio(
                    choices=[1, 2, 3],
                    label="Level",
                    scale=1
                )
                ground_truth = gr.Textbox(
                    label="Ground Truth",
                    scale=1
                )
                file_name = gr.Textbox(
                    label="File Name",
                    scale=2
                )
            with gr.Row():
                openai_api_key = gr.Textbox(
                    label="OpenAI API Key *",
                    type="password"
                )
                gemini_api_key = gr.Textbox(
                    label="Gemini API Key *",
                    type="password"
                )
            with gr.Row():
                clear_btn = gr.ClearButton(
                    components=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key]
                )
                submit_btn = gr.Button("Submit", variant="primary")
        with gr.Column(scale=1):
            answer = gr.Textbox(
                label="Answer",
                lines=1,
                interactive=False
            )
    submit_btn.click(
        fn=_run,
        inputs=[question, openai_api_key, gemini_api_key, file_name],
        outputs=answer
    )
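    # Note: level and ground_truth are display-only fields; the example rows
    # below populate them, but they are not passed to _run.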
    QUESTION_FILE_PATH = "data/gaia_validation.jsonl"
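    # Each example row must line up with the inputs list below, so get_questions
    # is assumed to return rows of the form [question, level, ground_truth,
    # file_name, openai_api_key, gemini_api_key], with the key columns
    # presumably left blank for the user to fill in.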
    gr.Examples(
        label="Level 1",
        examples=get_questions(QUESTION_FILE_PATH, 1),
        inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key],
        outputs=answer,
        cache_examples=False
    )
    gr.Examples(
        label="Level 2",
        examples=get_questions(QUESTION_FILE_PATH, 2),
        inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key],
        outputs=answer,
        cache_examples=False
    )
    gr.Examples(
        label="Level 3",
        examples=get_questions(QUESTION_FILE_PATH, 3),
        inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key],
        outputs=answer,
        cache_examples=False
    )
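
# mcp_server=True additionally exposes the app as an MCP server (supported in
# recent Gradio releases), so _run can be called as a tool by MCP clients;
# its docstring doubles as the tool description.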
gaia.launch(mcp_server=True)