Aileenvl committed
Commit 7130837 · verified · 1 parent: 7329f53

Update app.py

Files changed (1): app.py (+29, -30)
app.py CHANGED
@@ -1,22 +1,22 @@
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
+from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
 import datetime
 import requests
 import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
-
 from Gradio_UI import GradioUI
 
-# Below is an example of a tool that does nothing. Amaze us with your creativity !
+# Below is an example of a tool that does nothing. Amaze us with your creativity!
 @tool
-def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
-#Keep this format for the description / args / args description but feel free to modify the tool
-"""A tool that does nothing yet
+def my_custom_tool(arg1: str, arg2: int) -> str:
+    # it's important to specify the return type
+    # Keep this format for the description / args / args description but feel free to modify the tool
+    """A tool that does nothing yet
     Args:
         arg1: the first argument
         arg2: the second argument
     """
-    return "What magic will you build ?"
+    return "What magic will you build?"
 
 @tool
 def get_current_time_in_timezone(timezone: str) -> str:
@@ -36,61 +36,53 @@ def get_current_time_in_timezone(timezone: str) -> str:
 @tool
 def check_hf_model(model_name: str) -> str:
     """A tool that checks if a Hugging Face model is available and gets basic information.
-
     Args:
         model_name: The name of the model on Hugging Face (e.g., 'google/bert-base-uncased')
     """
     try:
         # Construct the HF API URL
         api_url = f"https://huggingface.co/api/models/{model_name}"
-
         # Make the request
         response = requests.get(api_url)
-
         if response.status_code == 200:
             data = response.json()
-
             # Extract useful information
             pipeline_tag = data.get('pipeline_tag', 'Not specified')
            downloads = data.get('downloads', 'Not available')
            likes = data.get('likes', 'Not available')
-
            return f"""Model '{model_name}' is available:
-- Task: {pipeline_tag}
-- Downloads: {downloads}
-- Likes: {likes}
-- URL: https://huggingface.co/{model_name}"""
+            - Task: {pipeline_tag}
+            - Downloads: {downloads}
+            - Likes: {likes}
+            - URL: https://huggingface.co/{model_name}"""
        else:
            return f"Model '{model_name}' not found or not accessible."
-
    except Exception as e:
        return f"Error checking model '{model_name}': {str(e)}"
 
-
 final_answer = FinalAnswerTool()
 
-# If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
-# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
-
+# If the agent does not answer, the model is overloaded, please use another model
+# or the following Hugging Face Endpoint that also contains qwen2.5 coder:
+# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
 model = HfApiModel(
-max_tokens=2096,
-temperature=0.5,
-model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
-custom_role_conversions=None,
+    max_tokens=2096,
+    temperature=0.5,
+    model_id='Qwen/Qwen2.5-Coder-32B-Instruct', # it is possible that this model may be overloaded
+    custom_role_conversions=None,
 )
 
-
 # Import tool from Hub
 image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
 
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
-
+
 agent = CodeAgent(
     model=model,
-    tools=[final_answer,check_hf_model,get_current_time_in_timezone], ## add your tools here (don't remove final answer)
+    tools=[final_answer, check_hf_model, get_current_time_in_timezone], # add your tools here (don't remove final answer)
     max_steps=6,
-    verbosity_level=1,
+    verbosity_level=2, # increased from 1 to 2 for more debugging info
     grammar=None,
     planning_interval=None,
     name=None,
@@ -98,5 +90,12 @@ agent = CodeAgent(
     prompt_templates=prompt_templates
 )
 
+# Add error handling and logging
+import logging
+logging.basicConfig(level=logging.DEBUG)
 
-GradioUI(agent).launch()
+try:
+    ui = GradioUI(agent)
+    ui.launch()
+except Exception as e:
+    logging.error(f"Failed to launch Gradio UI: {str(e)}")
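
For reference, the Hub endpoint wrapped by the new check_hf_model tool can be exercised on its own. The snippet below is a minimal standalone sketch, not part of the committed app.py; the model id is just the example from the tool's docstring, and the timeout value is an added assumption.

# Standalone sketch: query the same Hugging Face model-info endpoint that
# check_hf_model calls, and print the fields the tool reports.
# Assumptions: example model id taken from the tool's docstring; 10-second timeout added here.
import requests

model_name = "google/bert-base-uncased"
response = requests.get(f"https://huggingface.co/api/models/{model_name}", timeout=10)

if response.status_code == 200:
    data = response.json()
    print("Task:", data.get("pipeline_tag", "Not specified"))
    print("Downloads:", data.get("downloads", "Not available"))
    print("Likes:", data.get("likes", "Not available"))
else:
    print(f"Model '{model_name}' not found or not accessible (HTTP {response.status_code}).")

Inside the Space the agent calls the tool itself, so a check like this is only useful for inspecting the API response while debugging outside the app.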