import os
import threading

import gradio as gr

from crew import run_crew
from utils import get_questions

# Module-level lock: a lock created inside ask() would be local to each call and
# could not serialize concurrent requests while they swap environment variables.
ENV_LOCK = threading.Lock()


def ask(question, openai_api_key, gemini_api_key, anthropic_api_key, file_name=""):
"""
Ask General AI Assistant a question to answer.
Args:
question (str): The question to answer
openai_api_key (str): OpenAI API key
gemini_api_key (str): Gemini API key
anthropic_api_key (str): Anthropic API key
file_name (str): Optional file name
Returns:
str: The answer to the question
"""
    if not question:
        raise gr.Error("Question is required.")
    if not openai_api_key:
        raise gr.Error("OpenAI API Key is required.")
    if not gemini_api_key:
        raise gr.Error("Gemini API Key is required.")
    if not anthropic_api_key:
        raise gr.Error("Anthropic API Key is required.")

    if file_name:
        file_name = f"data/{file_name}"
    with ENV_LOCK:
        answer = ""
        try:
            os.environ["OPENAI_API_KEY"] = openai_api_key
            os.environ["GEMINI_API_KEY"] = gemini_api_key
            # crew.py is assumed to read the Anthropic key from MODEL_API_KEY.
            os.environ["MODEL_API_KEY"] = anthropic_api_key
            answer = run_crew(question, file_name)
        except Exception as e:
            raise gr.Error(str(e)) from e
        finally:
            del os.environ["OPENAI_API_KEY"]
            del os.environ["GEMINI_API_KEY"]
            del os.environ["MODEL_API_KEY"]

    return answer
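
# A usage sketch for calling ask() directly, e.g. from a smoke test. The question
# and keys below are placeholders, not values from this repository:
#
#     answer = ask(
#         "What is the capital of France?",
#         openai_api_key="sk-...",
#         gemini_api_key="...",
#         anthropic_api_key="sk-ant-...",
#     )
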
gr.close_all()

with gr.Blocks() as grady:
    gr.Markdown("## Grady - General AI Assistant")
    gr.Markdown(os.environ.get("DESCRIPTION"))
    with gr.Row():
        with gr.Column(scale=3):
            with gr.Row():
                question = gr.Textbox(
                    label="Question *",
                    placeholder="In the 2025 Gradio Agents & MCP Hackathon, what percentage of participants submitted a solution during the last 24 hours?",
                    interactive=True
                )
            with gr.Row():
                level = gr.Radio(
                    choices=[1, 2, 3],
                    label="GAIA Benchmark Level",
                    interactive=True,
                    scale=1
                )
                ground_truth = gr.Textbox(
                    label="Ground Truth",
                    interactive=True,
                    scale=1
                )
                file_name = gr.Textbox(
                    label="File Name",
                    interactive=True,
                    scale=2
                )
            with gr.Row():
                openai_api_key = gr.Textbox(
                    label="OpenAI API Key *",
                    type="password",
                    placeholder="sk-...",
                    interactive=True
                )
                gemini_api_key = gr.Textbox(
                    label="Gemini API Key *",
                    type="password",
                    interactive=True
                )
                anthropic_api_key = gr.Textbox(
                    label="Anthropic API Key *",
                    type="password",
                    placeholder="sk-ant-...",
                    interactive=True
                )
            with gr.Row():
                clear_btn = gr.ClearButton(
                    components=[question, level, ground_truth, file_name]
                )
                submit_btn = gr.Button("Submit", variant="primary")
        with gr.Column(scale=1):
            answer = gr.Textbox(
                label="Answer",
                lines=1,
                interactive=False
            )
    submit_btn.click(
        fn=ask,
        inputs=[question, openai_api_key, gemini_api_key, anthropic_api_key, file_name],
        outputs=answer
    )
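    # Each row returned by get_questions() is assumed to match the Examples inputs
    # below, i.e. [question, level, ground_truth, file_name, openai_api_key,
    # gemini_api_key, anthropic_api_key], presumably with the key columns left
    # blank so users still supply their own keys.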
    QUESTION_FILE_PATH = "data/gaia_validation.jsonl"

    gr.Examples(
        label="GAIA Benchmark Level 1 Problems",
        examples=get_questions(QUESTION_FILE_PATH, 1),
        inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key, anthropic_api_key],
        outputs=answer,
        cache_examples=False
    )
    gr.Examples(
        label="GAIA Benchmark Level 2 Problems",
        examples=get_questions(QUESTION_FILE_PATH, 2),
        inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key, anthropic_api_key],
        outputs=answer,
        cache_examples=False
    )
    gr.Examples(
        label="GAIA Benchmark Level 3 Problems",
        examples=get_questions(QUESTION_FILE_PATH, 3),
        inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key, anthropic_api_key],
        outputs=answer,
        cache_examples=False
    )
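
# With mcp_server=True, Gradio (assumed to be a version with MCP support) also exposes
# the app's event handlers such as ask() as MCP tools alongside the web UI, using the
# function docstring as the tool description.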
grady.launch(mcp_server=True)