from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
import random

from Gradio_UI import GradioUI

# Custom tool: the user inputs the name of a Star Trek character to get one of their quotes.
@tool
def get_star_trek_quotes(character: str) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description, but feel free to modify the tool
    """
    Fetches a quote from a Star Trek character.
    Args:
        character: The name of the character whose quote is requested (e.g., "Seven of Nine", "Spock", "Picard", "Data").

    Returns: 
        A quote from the specified Star Trek character, or a message if the character is not found.
    """
    quotes = {
        "Seven of Nine": [
            "It is unsettling. You say that I am a human being and yet, I am also Borg... Part of me not unlike your replicator... Not unlike the Doctor. Will you one day choose to abandon me as well?",
            "Fun will now commence."
        ],
        "Spock": [
            "Live and prosper.",
            "The needs of the many outweigh the needs of the few."
        ],
        "Picard": [
            "Make it so",
            "Earth was once a violent planet too. At times, the chaos threatened the very fabric of life. But like you, we evolved. We found better ways to handle our conflicts. But I think noone can deny that the seed of violence remains within each of us, We must recognise that, because that violence is capable of consuming each of us."
        ],
        "Data": [
            "It is interesting that people try to find meaningful patterns in things that are essentially random. I have noticed that the images they percieve sometimes suggest what they are thinking about at that particular moment. Besides, it is clearly a bunny rabbit.",
            "One is my name. The other is not."
        ]
    }

    # If the character exists in the dictionary, return one of their quotes
    if character in quotes:
        return random.choice(quotes[character])  # pick one of the stored quotes at random
    else:
        return f"Sorry, no quotes available for {character}. Please provide a valid Star Trek character name."

@tool
def analyze_species_difference(species1: str, species2: str) -> str:
    """
    Analyze the physiological difference between two Star Trek species using a search engine.

    Args:
        species1: The first species (e.g., "Vulkan", "Human").
        species2: The second species (e.g., "Klingon", "Romulan").

    Returns:
        A comparison of the two species based on the search results.
    """
    query = f"Physiological differences between {species1} and {species2} in Star Trek"

    search_results = DuckDuckGoSearchTool(query)

    if search_results:
        return f"Based on the research, here's a comparison between {species1} and {species2}"
    else:
        return f"Sorry, I couldn't find information on the physiological differences between {species1} and {species2}. Please, try another query."

@tool
def generate_starship_image(starship_class: str) -> str:
    """
    Generate an image of a Star Trek starship based on the starship class.

    Args:
        starship_class: The class of the starship to generate (e.g., "Defiant-class", "Galaxy-class").

    Returns:
        A URL to the generated image of the specified starship class.
    """
    prompt = f"Generate a digital image of Star Trek starship of the {starship_class} class."

    try:
        image_url = image_generation_tool(prompt)
        return f"Here is the image of a {starship_class} class starship: {image_url}"
    except Exception as e:
        return f"Error generating image for {starship_class} class starship: {str(e)}"
        
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"


final_answer = FinalAnswerTool()

# If the agent does not answer, the model is overloaded. Use another model, or the following
# Hugging Face endpoint, which also serves Qwen2.5 Coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'

model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)
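
# Drop-in alternative using the dedicated endpoint mentioned above (a sketch, commented out;
# swap it in if the public model is overloaded):
# model = HfApiModel(
#     max_tokens=2096,
#     temperature=0.5,
#     model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
#     custom_role_conversions=None,
# )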


# Import the text-to-image tool from the Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)
    
agent = CodeAgent(
    model=model,
    tools=[final_answer, get_star_trek_quotes, analyze_species_difference, generate_starship_image],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)
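
# For local debugging you can bypass the UI and query the agent directly (hypothetical prompt,
# kept commented out so the Gradio app below stays the single entry point):
#   print(agent.run("Give me a quote from Data and compare Vulcan and Klingon physiology."))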


GradioUI(agent).launch()