import yaml
import os
from smolagents import GradioUI, CodeAgent, HfApiModel
from huggingface_hub import InferenceClient
# Hugging Face Inference API token, supplied via the "hf_api" environment
# variable (e.g. a Space secret). os.getenv returns None if it is not set;
# HfApiModel below then has no credentials — TODO confirm the secret name.
hf_api = os.getenv("hf_api")

# NOTE(review): the original also built an `InferenceClient` here, but the
# resulting `client` was never referenced anywhere in this file — the agent
# talks to the model exclusively through HfApiModel below — so the dead
# object has been removed.

# Absolute path of the directory containing this file, used later to locate
# prompts.yaml relative to the script instead of the current working directory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
from tools.web_search import DuckDuckGoSearchTool as WebSearch
from tools.visit_webpage import VisitWebpageTool as VisitWebpage
from tools.suggest_menu import SimpleTool as SuggestMenu
from tools.catering_service_tool import SimpleTool as CateringServiceTool
from tools.superhero_party_theme_generator import SuperheroPartyThemeTool as SuperheroPartyThemeGenerator
from tools.final_answer import FinalAnswerTool as FinalAnswer
# Model identifier for the LLM backing the agent.
_MODEL_ID = 'Qwen/Qwen2.5-Coder-32B-Instruct'

# LLM backend: Qwen2.5-Coder-32B served through the Hugging Face
# Inference API, authenticated with the token read above.
model = HfApiModel(
    provider='hf-inference',
    model_id=_MODEL_ID,
    token=hf_api,
)
web_search = WebSearch()
visit_webpage = VisitWebpage()
suggest_menu = SuggestMenu()
catering_service_tool = CateringServiceTool()
superhero_party_theme_generator = SuperheroPartyThemeGenerator()
final_answer = FinalAnswer()
# Load the agent's prompt templates from prompts.yaml next to this file.
# encoding='utf-8' is explicit so the read does not depend on the platform's
# default codec; safe_load prevents the YAML from instantiating arbitrary
# Python objects.
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r', encoding='utf-8') as stream:
    prompt_templates = yaml.safe_load(stream)
# The CodeAgent writes and executes Python snippets that call its tools.
# final_answer (instantiated above but previously never used) is now
# registered explicitly so the agent uses this project's FinalAnswerTool
# rather than a library-supplied default — TODO confirm against smolagents'
# default-tool behavior.
agent = CodeAgent(
    model=model,
    tools=[
        web_search,
        visit_webpage,
        suggest_menu,
        catering_service_tool,
        superhero_party_theme_generator,
        final_answer,
    ],
    managed_agents=[],       # no delegated sub-agents
    max_steps=10,            # hard cap on think/act iterations per query
    verbosity_level=2,
    grammar=None,
    planning_interval=None,  # no periodic re-planning step
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)
# Launch the Gradio chat UI only when executed as a script, not on import.
if __name__ == "__main__":
    ui = GradioUI(agent)
    ui.launch()