# NOTE: removed Hugging Face Spaces web-viewer artifacts (space status, file
# size, commit-hash gutter, and line numbers) that were scraped along with the
# source and are not part of the Python file.
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
# from huggingface_hub import login
# login()
@tool
def lookup_wikipedia_page(search_query: str) -> str:
    """Looks up the exact Wikipedia page title for a given search query, allowing to get the exact page title for subsequent wikipedia views tool
    Args:
        search_query: The search term to find the Wikipedia page for
    """
    try:
        # Wikipedia's MediaWiki search API; srlimit=1 keeps only the best match.
        url = "https://en.wikipedia.org/w/api.php"
        params = {
            "action": "query",
            "format": "json",
            "list": "search",
            "srsearch": search_query,
            "srlimit": 1,
        }
        # Fix: without a timeout a stalled API call would hang the agent forever.
        response = requests.get(url, params=params, timeout=10)
        if response.status_code == 200:
            data = response.json()
            if data["query"]["search"]:
                # Return the exact page title of the top search hit.
                return data["query"]["search"][0]["title"]
            else:
                return f"No Wikipedia pages found for '{search_query}'"
        else:
            return f"Error searching Wikipedia: {response.status_code}"
    except Exception as e:
        # Tools report failures as strings instead of raising, so the agent
        # can see the error text and recover in a later step.
        return f"Error processing request: {str(e)}"
@tool
def get_wikipedia_views(article_title: str, window_size_in_days: int) -> str:
    """Fetches view statistics for a Wikipedia article. The article title is the exact page title from the lookup_wikipedia_page tool.
    Args:
        article_title: The title of the Wikipedia article to get views for
        window_size_in_days: The number of days to get views for
    """
    try:
        # Wikipedia page titles use underscores instead of spaces in the API.
        article_title = article_title.replace(' ', '_')
        # Query the pageviews prop for the article.
        # NOTE(review): pvipdays is capped by the API (~60 days) — larger
        # windows are silently truncated; confirm against the PageViewInfo docs.
        id_url = "https://en.wikipedia.org/w/api.php"
        id_params = {
            "action": "query",
            "format": "json",
            "titles": article_title,
            "prop": "pageviews",
            "pvipdays": window_size_in_days
        }
        # Fix: without a timeout a stalled API call would hang the agent forever.
        id_response = requests.get(id_url, params=id_params, timeout=10)
        if id_response.status_code != 200:
            return f"Error getting page ID: {id_response.status_code}"
        id_data = id_response.json()
        # Bail out early when the response carries no page data at all.
        if "pages" not in id_data["query"]:
            return f"Could not find page '{article_title}'"
        # The "pages" dict is keyed by page id; we only asked for one title,
        # so take the first (and only) entry.
        page_data = next(iter(id_data["query"]["pages"].values()))
        if "pageviews" not in page_data:
            return f"No pageview data available for '{article_title}'"
        # Days with no recorded data come back as None — skip them in the sum.
        total_views = sum(views for views in page_data["pageviews"].values() if views is not None)
        return f"The article '{article_title}' had {total_views} views in the last {window_size_in_days} days"
    except Exception as e:
        # Tools report failures as strings instead of raising, so the agent
        # can see the error text and recover in a later step.
        return f"Error processing request: {str(e)}"
# --- Agent wiring: model, tools, prompts, UI ---
final_answer = FinalAnswerTool()

# If the agent does not answer, the model is overloaded, please use another
# model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)

# Import tool from Hub.
# NOTE(review): trust_remote_code=True executes code downloaded from the Hub —
# only keep this for repos you trust. Also note the tool is loaded but not
# registered in the agent's tools list below.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Prompt templates (system/planning prompts) consumed by the CodeAgent.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[final_answer, lookup_wikipedia_page, get_wikipedia_views],  # add your tools here (don't remove final answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

# Fix: removed the stray " |" scrape artifact that trailed this line and made
# it a syntax error.
GradioUI(agent).launch()