from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
from langchain_huggingface import HuggingFacePipeline
from langchain.tools import Tool
from langchain.agents import create_react_agent
from langgraph.graph import StateGraph, END
from pydantic import BaseModel
import gradio as gr

import os

# On a Hugging Face ZeroGPU Space the real `spaces` package is available; otherwise
# fall back to a no-op `spaces.GPU` decorator so the script also runs locally on CPU.
if os.environ.get("SPACES_ZERO_GPU") is not None:
    import spaces
else:
    class spaces:
        @staticmethod
        def GPU(func):
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            return wrapper

@spaces.GPU
def fake_gpu():
    pass

# ---------------------------------------
# Step 1: Define Hugging Face LLM (Qwen/Qwen2.5-7B-Instruct-1M)
# ---------------------------------------
def create_llm():
    """Load Qwen/Qwen2.5-7B-Instruct-1M and wrap it in a LangChain HuggingFacePipeline."""
    model_name = "Qwen/Qwen2.5-7B-Instruct-1M"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    
    llm_pipeline = pipeline(
        task="text-generation",
        model=model,
        tokenizer=tokenizer,
        device=-1,  # CPU mode, set to 0 for GPU
        max_new_tokens=200
    )
    return HuggingFacePipeline(pipeline=llm_pipeline)
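
# Note: the pipeline above stays on CPU (device=-1). On a ZeroGPU Space, GPU work has
# to happen inside a @spaces.GPU-decorated function; this script only registers the
# no-op fake_gpu above and keeps inference on CPU.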

# ---------------------------------------
# Step 2: Create Agents
# ---------------------------------------
llm = create_llm()

# Registration Agent
registration_agent = Tool(
    name="registration_check",
    description="Check if a patient is registered.",
    func=lambda details: registration_tool(details.get("visitor_name"), details.get("visitor_mobile"))
)

# Scheduling Agent
scheduling_agent = Tool(
    name="schedule_appointment",
    description="Fetch available time slots for a doctor.",
    func=lambda details: doctor_slots_tool(details.get("doctor_name"))
)

# Payment Agent
payment_agent = Tool(
    name="process_payment",
    description="Generate a payment link and confirm the payment.",
    func=lambda details: confirm_payment_tool(details.get("transaction_id"))
)

# Email Agent
email_agent = Tool(
    name="send_email",
    description="Send appointment confirmation email to the visitor.",
    func=lambda details: email_tool(
        details.get("visitor_email"),
        details.get("appointment_details"),
        details.get("hospital_location")
    )
)
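
# Note: the Tool wrappers above (and the create_react_agent import) describe an
# agent-driven setup, but the LangGraph workflow below calls the Step 3 mock
# functions directly, so these Tools and the `llm` are not exercised in this script.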

# ---------------------------------------
# Step 3: Tools and Mock Functions
# ---------------------------------------
def registration_tool(visitor_name: str, visitor_mobile: str) -> bool:
    registered_visitors = [{"visitor_name": "John Doe", "visitor_mobile": "1234567890"}]
    return any(
        v["visitor_name"] == visitor_name and v["visitor_mobile"] == visitor_mobile
        for v in registered_visitors
    )

def register_visitor(visitor_name: str, visitor_mobile: str) -> bool:
    """Register a new user if not already registered."""
    return True  # Simulate successful registration

def doctor_slots_tool(doctor_name: str):
    available_slots = {
        "Dr. Smith": ["10:00 AM", "2:00 PM"],
        "Dr. Brown": ["12:00 PM"]
    }
    return available_slots.get(doctor_name, [])

def payment_tool(amount: float):
    """Generate a payment link."""
    return f"http://mock-payment-link.com/pay?amount={amount}"

def confirm_payment_tool(transaction_id: str) -> dict:
    """Confirm the payment."""
    if transaction_id == "TIMEOUT":
        return {"status": "FAILED", "reason_code": "timeout"}
    elif transaction_id == "SUCCESS":
        return {"status": "SUCCESS", "reason_code": None}
    else:
        return {"status": "FAILED", "reason_code": "other_error"}

def email_tool(visitor_email: str, appointment_details: str, hospital_location: str) -> bool:
    """Simulate sending an email to the visitor with appointment details."""
    print(f"Sending email to {visitor_email}...")
    print(f"Appointment Details: {appointment_details}")
    print(f"Hospital Location: {hospital_location}")
    # Simulate success
    return True

# ---------------------------------------
# Step 4: Define Workflow States
# ---------------------------------------
class VisitorState(BaseModel):
    visitor_name: str = ""
    visitor_mobile: str = ""
    visitor_email: str = ""
    doctor_name: str = ""
    department_name: str = ""
    selected_slot: str = ""
    messages: list = []
    payment_confirmed: bool = False
    email_sent: bool = False
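
# Note: `messages` has no reducer, so each node's returned "messages" replaces the
# previous value and only the last node's messages appear in the final result.
# A minimal sketch of an accumulating field, using LangGraph's Annotated-reducer
# convention, is shown commented out below (not enabled, to keep current behaviour):
#
#   import operator
#   from typing import Annotated
#
#   class VisitorState(BaseModel):
#       messages: Annotated[list, operator.add] = []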

def input_state(state: VisitorState):
    """InputState: Collect visitor details."""
    return {"messages": ["Please provide your name, mobile number, and email."], "next": "RegistrationState"}

def registration_state(state: VisitorState):
    """Registration State: Check and register visitor."""
    is_registered = registration_tool(state.visitor_name, state.visitor_mobile)
    print("The visitor named "+state.visitor_name+" and mobile number "+state.visitor_mobile+" registration is "+is_registered)
    if is_registered:
        return {"messages": ["Visitor is registered."], "next": "SchedulingState"}
    else:
        successfully_registered = register_visitor(state.visitor_name, state.visitor_mobile)
        print("Registration of the visitor named "+state.visitor_name+" and mobile number "+state.visitor_mobile+" registration is "+is_registered) 
        if successfully_registered:
            return {"messages": ["Visitor has been successfully registered."], "next": "SchedulingState"}
        else:
            return {"messages": ["Registration failed. Please try again later."], "next": END}

def scheduling_state(state: VisitorState):
    """SchedulingState: Fetch available slots for a doctor."""
    available_slots = doctor_slots_tool(state.doctor_name)
    if available_slots:
        # Return the slot as part of the update dict: LangGraph only persists fields
        # returned from a node, so mutating `state` here would be lost.
        selected_slot = available_slots[0]
        return {
            "selected_slot": selected_slot,
            "messages": [f"Slot selected for {state.doctor_name}: {selected_slot}"],
            "next": "PaymentState",
        }
    else:
        return {"messages": [f"No available slots for {state.doctor_name}."], "next": END}

def payment_state(state: VisitorState):
    """PaymentState: Generate payment link and confirm."""
    payment_link = payment_tool(500)
    messages = [f"Please proceed to pay at: {payment_link}"]

    # Simulate payment confirmation
    payment_response = confirm_payment_tool("SUCCESS")
    if payment_response["status"] == "SUCCESS":
        # Return payment_confirmed as a state update so FinalState can see it;
        # mutating `state` directly would not persist across nodes.
        messages.append("Payment successful. Appointment is being finalized.")
        return {"payment_confirmed": True, "messages": messages, "next": "FinalState"}
    elif payment_response["reason_code"] == "timeout":
        messages.append("Payment timed out. Retrying payment...")
        return {"messages": messages, "next": "PaymentState"}
    else:
        messages.append("Payment failed due to an error. Please try again later.")
        return {"messages": messages, "next": END}

def final_state(state: VisitorState):
    """FinalState: Send email confirmation and finalize the appointment."""
    if state.payment_confirmed:
        appointment_details = f"Doctor: {state.doctor_name}\nTime: {state.selected_slot}"
        hospital_location = "123 Main St, Springfield, USA"
        email_success = email_tool(state.visitor_email, appointment_details, hospital_location)

        if email_success:
            # Return email_sent as a state update rather than mutating `state`.
            return {
                "email_sent": True,
                "messages": [f"Appointment confirmed. Details sent to your email: {state.visitor_email}"],
                "next": END,
            }
        else:
            return {"messages": ["Appointment confirmed, but failed to send email. Please contact support."], "next": END}
    else:
        return {"messages": ["Payment confirmation failed. Appointment could not be finalized."], "next": END}

# ---------------------------------------
# Step 5: Build Langgraph Workflow
# ---------------------------------------
workflow = StateGraph(VisitorState)

# Add nodes
workflow.add_node("InputState", input_state)
workflow.add_node("RegistrationState", registration_state)
workflow.add_node("SchedulingState", scheduling_state)
workflow.add_node("PaymentState", payment_state)
workflow.add_node("FinalState", final_state)

# Define edges
workflow.add_edge("InputState", "RegistrationState")
workflow.add_edge("RegistrationState", "SchedulingState")
workflow.add_edge("SchedulingState", "PaymentState")
workflow.add_edge("PaymentState", "FinalState")
workflow.add_edge("FinalState", END)

# Entry Point
workflow.set_entry_point("InputState")
compiled_graph = workflow.compile()

visitor_name="Bob Joe"
visitor_mobile="123456789012"
visitor_email="[email protected]"
doctor_name="Normand Joseph"
department_name="Orthopedics"

gstate = VisitorState(
        visitor_name=visitor_name,
        visitor_mobile=visitor_mobile,
        visitor_email=visitor_email,
        doctor_name=doctor_name,
        department_name=department_name,
    )
# Execute workflow
#result = compiled_graph.invoke(gstate.dict())
#result = compiled_graph.invoke(gstate.model_dump())


# ---------------------------------------
# Step 6: Gradio Interface
# ---------------------------------------
def gradio_interface(visitor_name, visitor_mobile, visitor_email, doctor_name, department_name):
    """Interface function for the Gradio application."""
    state = VisitorState(
        visitor_name=visitor_name,
        visitor_mobile=visitor_mobile,
        visitor_email=visitor_email,
        doctor_name=doctor_name,
        department_name=department_name,
    )
    # Execute workflow
    #result = compiled_graph.invoke(state.dict())
    result = compiled_graph.invoke(state.model_dump())
    print(state)
    print(result)
    return "\n".join(result["messages"])
    #return "Here returning a string for testing gradio interface!" + visitor_name


iface = gr.Interface(
    fn=gradio_interface,
    inputs=[
        gr.Textbox(label="Visitor Name"),
        gr.Textbox(label="Visitor Mobile Number"),
        gr.Textbox(label="Visitor Email"),
        gr.Textbox(label="Doctor Name"),
        gr.Textbox(label="Department Name"),
    ],
    outputs="textbox"
)


# Execute the Gradio interface
if __name__ == "__main__":
    iface.launch()


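# Reference snippet kept as an inert module-level string: the standard ZeroGPU usage
# example (a torch tensor moved to GPU inside a @spaces.GPU-decorated function).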
"""
import gradio as gr
import spaces
import torch

zero = torch.Tensor([0]).cuda()
print(zero.device) # <-- 'cpu' πŸ€”

@spaces.GPU
def greet(n):
    print(zero.device) # <-- 'cuda:0' πŸ€—
    return f"Hello {zero + n} Tensor"

demo = gr.Interface(fn=greet, inputs=gr.Number(), outputs=gr.Text())
demo.launch()
"""