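# NOTE: The block below is the earlier smolagents-based version of Alfred,
# kept commented out for reference. The active LangGraph implementation
# starts after it.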
# import gradio as gr
# import random
# from smolagents import GradioUI, CodeAgent, HfApiModel

# # Import our custom tools from their modules
# from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
# from retriever import load_guest_dataset

# # Initialize the Hugging Face model
# model = HfApiModel()

# # Initialize the web search tool
# search_tool = DuckDuckGoSearchTool()

# # Initialize the weather tool
# weather_info_tool = WeatherInfoTool()

# # Initialize the Hub stats tool
# hub_stats_tool = HubStatsTool()

# # Load the guest dataset and initialize the guest info tool
# guest_info_tool = load_guest_dataset()

# # Create Alfred with all the tools
# alfred = CodeAgent(
#     tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool], 
#     model=model,
#     add_base_tools=True,  # Add any additional base tools
#     planning_interval=3   # Enable planning every 3 steps
# )

# if __name__ == "__main__":
#     GradioUI(alfred).launch()


import os

from typing import TypedDict, Annotated
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
from langgraph.prebuilt import ToolNode
from langgraph.graph import START, StateGraph
from langgraph.prebuilt import tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

from tools import DuckDuckGoSearchRun, weather_info_tool, hub_stats_tool
from retriever import guest_info_tool

# Initialize the web search tool
search_tool = DuckDuckGoSearchRun()

# Initialize the chat model via a Hugging Face Inference endpoint.
# The API token is read from the environment (set HUGGINGFACEHUB_API_TOKEN).
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")

llm = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)

chat = ChatHuggingFace(llm=llm, verbose=True)
tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool]
chat_with_tools = chat.bind_tools(tools)

# Define the agent state and the assistant node
class AgentState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]

def assistant(state: AgentState):
    return {
        "messages": [chat_with_tools.invoke(state["messages"])],
    }

## The graph
builder = StateGraph(AgentState)

# Define nodes: these do the work
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))

# Define edges: these determine how the control flow moves
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    # If the latest message requires a tool, route to tools
    # Otherwise, provide a direct response
    tools_condition,
)
builder.add_edge("tools", "assistant")
alfred = builder.compile()
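
# Optional (sketch, not part of the course code): render the compiled graph for
# inspection. draw_mermaid_png() may require extra dependencies; writing the
# returned bytes to a file is one way to view it.
# with open("alfred_graph.png", "wb") as f:
#     f.write(alfred.get_graph().draw_mermaid_png())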

if __name__ == "__main__":
    # Quick smoke test: ask Alfred about one of the guests and print the reply.
    messages = [HumanMessage(content="Tell me about our guest named 'Lady Ada Lovelace'.")]
    response = alfred.invoke({"messages": messages})
    print("🎩 Alfred's Response:")
    print(response["messages"][-1].content)
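
# Optional front-end (sketch): a minimal Gradio chat UI around the compiled
# graph, assuming `gradio` is installed. The smolagents GradioUI used in the
# commented-out version above expects a smolagents agent, not a LangGraph graph,
# so a plain gr.ChatInterface is used here instead.
#
# import gradio as gr
#
# def respond(message, history):
#     result = alfred.invoke({"messages": [HumanMessage(content=message)]})
#     return result["messages"][-1].content
#
# gr.ChatInterface(respond, title="Alfred").launch()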