ysharma HF Staff commited on
Commit
51ed9f4
·
verified ·
1 Parent(s): f001eb8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +460 -238
app.py CHANGED
@@ -2,13 +2,21 @@ import gradio as gr
2
  import asyncio
3
  import json
4
  import os
 
 
 
5
  from typing import List, Dict, Any, Optional
6
  from dataclasses import dataclass
7
  import aiohttp
8
- from mcp import ClientSession
9
- from mcp.client.sse import sse_client
10
  import anthropic
11
  from datetime import datetime
 
 
 
 
 
 
 
12
 
13
  # Configuration for MCP servers (Gradio apps acting as MCP servers)
14
  @dataclass
@@ -17,29 +25,9 @@ class MCPServerConfig:
17
  url: str
18
  description: str
19
 
20
- # Default MCP servers - you can add your own Gradio apps here
21
- DEFAULT_SERVERS = [
22
- MCPServerConfig(
23
- name="Letter Counter",
24
- url="https://your-letter-counter-space.hf.space/gradio_api/mcp/sse",
25
- description="Counts letters in text"
26
- ),
27
- MCPServerConfig(
28
- name="Weather Service",
29
- url="https://your-weather-space.hf.space/gradio_api/mcp/sse",
30
- description="Provides weather information"
31
- ),
32
- MCPServerConfig(
33
- name="Image Generator",
34
- url="https://your-image-gen-space.hf.space/gradio_api/mcp/sse",
35
- description="Generates images from text prompts"
36
- )
37
- ]
38
-
39
  class MCPClient:
40
  def __init__(self):
41
  self.servers: Dict[str, MCPServerConfig] = {}
42
- self.sessions: Dict[str, ClientSession] = {}
43
  self.available_tools: Dict[str, Dict] = {}
44
  self.anthropic_client = None
45
 
@@ -48,86 +36,120 @@ class MCPClient:
48
  self.anthropic_client = anthropic.Anthropic(
49
  api_key=os.getenv("ANTHROPIC_API_KEY")
50
  )
 
 
 
51
 
52
- async def add_server(self, config: MCPServerConfig) -> bool:
53
- """Add an MCP server to the client"""
54
  try:
55
  self.servers[config.name] = config
56
- await self._connect_server(config.name)
57
- return True
 
 
 
 
 
 
 
 
 
58
  except Exception as e:
59
- print(f"Failed to add server {config.name}: {e}")
60
- return False
 
61
 
62
- async def _connect_server(self, server_name: str) -> bool:
63
- """Connect to a specific MCP server"""
64
  try:
65
- config = self.servers[server_name]
 
 
 
66
 
67
- # Create SSE client connection
68
- async with sse_client(config.url) as (read, write):
69
- session = ClientSession(read, write)
70
- await session.initialize()
71
-
72
- # Get available tools from this server
73
- tools = await session.list_tools()
74
-
75
- # Store tools with server prefix to avoid conflicts
76
- for tool in tools.tools:
77
- tool_key = f"{server_name}::{tool.name}"
78
- self.available_tools[tool_key] = {
79
- "server": server_name,
80
- "tool": tool,
81
- "session": session
82
- }
83
-
84
- self.sessions[server_name] = session
85
- print(f"✅ Connected to {server_name}: {len(tools.tools)} tools available")
86
- return True
87
-
88
  except Exception as e:
89
- print(f" Failed to connect to {server_name}: {e}")
90
- return False
91
 
92
- async def list_all_tools(self) -> List[str]:
93
- """List all available tools from all connected servers"""
94
- return list(self.available_tools.keys())
95
-
96
- async def call_tool(self, tool_key: str, arguments: Dict[str, Any]) -> Any:
97
- """Call a tool on the appropriate MCP server"""
98
- if tool_key not in self.available_tools:
99
- raise ValueError(f"Tool {tool_key} not found")
100
-
101
- tool_info = self.available_tools[tool_key]
102
- session = tool_info["session"]
103
- tool = tool_info["tool"]
104
-
105
  try:
106
- result = await session.call_tool(tool.name, arguments)
107
- return result
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
108
  except Exception as e:
109
- raise Exception(f"Tool call failed: {e}")
110
 
111
- async def get_server_status(self) -> Dict[str, str]:
112
  """Get status of all configured servers"""
113
  status = {}
114
  for name, config in self.servers.items():
115
- if name in self.sessions:
116
- status[name] = "✅ Connected"
117
- else:
118
- status[name] = "❌ Disconnected"
119
  return status
120
 
121
  # Global MCP client instance
122
  mcp_client = MCPClient()
123
 
124
- async def initialize_default_servers():
125
- """Initialize connections to default MCP servers"""
126
- for server_config in DEFAULT_SERVERS:
127
- await mcp_client.add_server(server_config)
128
-
129
- async def chat_with_mcp(message: str, history: List[List[str]]) -> List[List[str]]:
130
- """Main chat function that uses MCP tools"""
131
 
132
  if not mcp_client.anthropic_client:
133
  error_msg = "❌ Anthropic API key not configured. Please set ANTHROPIC_API_KEY environment variable."
@@ -135,32 +157,27 @@ async def chat_with_mcp(message: str, history: List[List[str]]) -> List[List[str
135
  return history
136
 
137
  try:
138
- # Get available tools
139
- available_tools = await mcp_client.list_all_tools()
140
-
141
- if not available_tools:
142
- response = "⚠️ No MCP servers connected. Please check your server configurations."
143
  history.append([message, response])
144
  return history
145
 
146
- # Prepare tool information for Claude
147
- tools_info = []
148
- for tool_key in available_tools:
149
- tool_info = mcp_client.available_tools[tool_key]
150
- tool = tool_info["tool"]
151
- server_name = tool_info["server"]
152
-
153
- tools_info.append({
154
- "name": tool_key,
155
- "description": f"[{server_name}] {tool.description}",
156
- "input_schema": tool.inputSchema
157
  })
158
 
159
  # Create messages for Claude
160
  messages = []
161
 
162
- # Add conversation history
163
- for user_msg, assistant_msg in history:
 
164
  messages.append({"role": "user", "content": user_msg})
165
  messages.append({"role": "assistant", "content": assistant_msg})
166
 
@@ -168,28 +185,29 @@ async def chat_with_mcp(message: str, history: List[List[str]]) -> List[List[str
168
  messages.append({"role": "user", "content": message})
169
 
170
  # System prompt
171
- system_prompt = f"""You are a helpful AI assistant with access to various tools through MCP (Model Context Protocol).
172
 
173
- Available tools from connected MCP servers:
174
- {json.dumps(tools_info, indent=2)}
175
 
176
- When users request something that can be accomplished with these tools, use them appropriately.
177
- Always explain what you're doing and which tool/server you're using.
 
 
 
 
 
178
 
179
  Current time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
180
- """
 
181
 
182
- # Call Claude with tool use capability
183
  response = mcp_client.anthropic_client.messages.create(
184
  model="claude-3-5-sonnet-20241022",
185
  max_tokens=1024,
186
  system=system_prompt,
187
- messages=messages,
188
- tools=[{
189
- "name": tool_info["name"],
190
- "description": tool_info["description"],
191
- "input_schema": tool_info["input_schema"]
192
- } for tool_info in tools_info]
193
  )
194
 
195
  assistant_response = ""
@@ -198,91 +216,210 @@ Current time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
198
  for content in response.content:
199
  if content.type == "text":
200
  assistant_response += content.text
201
- elif content.type == "tool_use":
202
- # Execute the tool call
203
- try:
204
- tool_result = await mcp_client.call_tool(
205
- content.name,
206
- content.input
207
- )
 
 
 
 
 
 
 
 
 
 
208
 
209
- # Add tool result to response
210
- assistant_response += f"\n\n🔧 Used tool: {content.name}\n"
211
- assistant_response += f"Result: {tool_result.content[0].text if tool_result.content else 'No result'}"
 
 
 
 
 
 
 
212
 
213
- except Exception as e:
214
- assistant_response += f"\n\n❌ Tool error: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
215
 
216
  if not assistant_response:
217
- assistant_response = "I understand your request, but I couldn't generate a proper response."
218
 
219
  history.append([message, assistant_response])
220
  return history
221
 
222
  except Exception as e:
223
  error_msg = f"❌ Error: {str(e)}"
 
224
  history.append([message, error_msg])
225
  return history
226
 
227
- async def add_custom_server(name: str, url: str, description: str) -> str:
228
- """Add a custom MCP server"""
229
  if not name or not url:
230
  return "❌ Please provide both name and URL"
231
 
232
- if not url.endswith("/gradio_api/mcp/sse"):
233
- url = url.rstrip("/") + "/gradio_api/mcp/sse"
 
 
234
 
235
- config = MCPServerConfig(name=name, url=url, description=description or "Custom server")
 
 
 
 
236
 
237
- success = await mcp_client.add_server(config)
238
- if success:
239
- return f"✅ Successfully added server: {name}"
240
- else:
241
- return f"❌ Failed to add server: {name}"
 
 
 
 
 
 
 
 
 
 
 
 
242
 
243
- async def get_server_status() -> str:
244
  """Get status of all servers"""
245
- status = await mcp_client.get_server_status()
246
- status_text = "🔧 **MCP Server Status:**\n\n"
247
-
248
- for name, state in status.items():
249
- server_config = mcp_client.servers[name]
250
- status_text += f"**{name}**: {state}\n"
251
- status_text += f" - URL: {server_config.url}\n"
252
- status_text += f" - Description: {server_config.description}\n\n"
253
-
254
- # Add available tools count
255
- tools_count = len(mcp_client.available_tools)
256
- status_text += f"**Total available tools**: {tools_count}"
257
-
258
- return status_text
 
 
 
 
259
 
260
- # Gradio Interface
261
  def create_interface():
262
- with gr.Blocks(title="MCP Chatbot Client", theme=gr.themes.Ocean()) as demo:
263
  gr.Markdown("""
264
- # 🤖 MCP Chatbot Client
265
 
266
- This chatbot connects to multiple Gradio apps that function as MCP servers, giving it access to various tools and capabilities.
267
 
268
- **Features:**
269
- - Connect to multiple MCP servers (Gradio apps)
270
- - Use tools from different servers seamlessly
271
- - Add custom servers dynamically
272
- - Monitor server status
 
273
  """)
274
 
275
  with gr.Tabs():
276
  with gr.Tab("💬 Chat"):
277
  chatbot = gr.Chatbot(
278
- label="MCP-Powered Chatbot",
279
- height=400,
280
  show_label=True
281
  )
282
 
283
  msg = gr.Textbox(
284
  label="Your message",
285
- placeholder="Ask me anything! I can use tools from connected MCP servers...",
286
  lines=2
287
  )
288
 
@@ -290,116 +427,200 @@ def create_interface():
290
  submit_btn = gr.Button("Send", variant="primary")
291
  clear_btn = gr.Button("Clear Chat", variant="secondary")
292
 
293
- # Example prompts
294
  gr.Examples(
295
  examples=[
296
- "Count the letter 'a' in the word 'banana'",
297
- "What's the weather like today?",
298
  "Generate an image of a sunset over mountains",
299
- "What tools are available to me?",
 
 
 
 
300
  ],
301
  inputs=msg,
302
  label="Try these examples:"
303
  )
 
 
 
 
 
 
 
304
 
305
  with gr.Tab("🔧 Server Management"):
306
  gr.Markdown("### Add Custom MCP Server")
307
 
308
  with gr.Row():
309
- server_name = gr.Textbox(label="Server Name", placeholder="My Custom Server")
310
- server_url = gr.Textbox(label="Server URL", placeholder="https://your-space.hf.space")
 
 
 
 
 
 
 
 
 
 
 
311
 
312
- server_desc = gr.Textbox(label="Description", placeholder="What does this server do?")
313
  add_server_btn = gr.Button("Add Server", variant="primary")
314
  add_server_output = gr.Textbox(label="Result", interactive=False)
315
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
316
  gr.Markdown("### Server Status")
317
  status_btn = gr.Button("Check Status", variant="secondary")
318
  status_output = gr.Markdown()
319
 
320
- with gr.Tab("ℹ️ Help"):
321
  gr.Markdown("""
322
- ## How to Use
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
323
 
324
- ### 1. Chat Tab
325
- - Use the chat interface to interact with the MCP-powered chatbot
326
- - The bot can access tools from connected MCP servers
327
- - Try the example prompts to get started
328
 
329
- ### 2. Adding Custom Servers
330
- - Go to the "Server Management" tab
331
- - Enter the name and URL of your Gradio app
332
- - The URL should be the base URL of your Gradio app (e.g., `https://your-space.hf.space`)
333
- - The system will automatically append `/gradio_api/mcp/sse`
334
 
335
- ### 3. Creating MCP Server Apps
336
- To create a Gradio app that works as an MCP server:
 
 
 
 
 
 
 
 
337
 
338
  ```python
339
  import gradio as gr
 
 
 
 
 
 
 
 
340
 
341
- def your_function(param1: str, param2: int) -> str:
342
- '''Your function description here.
343
 
344
  Args:
345
- param1: Description of parameter 1
346
- param2: Description of parameter 2
347
 
348
  Returns:
349
- Description of return value
350
  '''
351
- return f"Result: {param1} - {param2}"
 
 
 
352
 
353
  demo = gr.Interface(
354
- fn=your_function,
355
- inputs=["text", "number"],
356
- outputs="text"
 
 
 
 
357
  )
358
 
359
- demo.launch(mcp_server=True) # This enables MCP server functionality
360
  ```
361
 
362
- ### 4. Environment Variables
363
- - Set `ANTHROPIC_API_KEY` to use Claude for chat functionality
364
- - The app will still work without it but with limited capabilities
 
 
 
 
 
 
 
 
365
 
366
- ### 5. Deployment on Hugging Face Spaces
367
- - Upload your code to a new Space
368
- - Set the environment variables in Space settings
369
- - Your app will be automatically accessible as an MCP server
 
 
 
 
 
 
 
370
  """)
371
 
372
  # Event handlers
373
  def submit_message(message, history):
374
  if message.strip():
375
- # Run async function
376
- loop = asyncio.new_event_loop()
377
- asyncio.set_event_loop(loop)
378
- try:
379
- result = loop.run_until_complete(chat_with_mcp(message, history))
380
- return result, ""
381
- finally:
382
- loop.close()
383
  return history, message
384
 
385
- def add_server_handler(name, url, desc):
386
- loop = asyncio.new_event_loop()
387
- asyncio.set_event_loop(loop)
388
- try:
389
- result = loop.run_until_complete(add_custom_server(name, url, desc))
390
- return result
391
- finally:
392
- loop.close()
393
-
394
- def status_handler():
395
- loop = asyncio.new_event_loop()
396
- asyncio.set_event_loop(loop)
397
- try:
398
- result = loop.run_until_complete(get_server_status())
399
- return result
400
- finally:
401
- loop.close()
402
-
403
  # Connect event handlers
404
  submit_btn.click(
405
  submit_message,
@@ -413,31 +634,32 @@ def create_interface():
413
  outputs=[chatbot, msg]
414
  )
415
 
416
- clear_btn.click(lambda: ([], ""), outputs=[chatbot, msg])
 
 
 
417
 
418
  add_server_btn.click(
419
- add_server_handler,
420
  inputs=[server_name, server_url, server_desc],
421
  outputs=[add_server_output]
422
  )
423
 
424
  status_btn.click(
425
- status_handler,
426
  outputs=[status_output]
427
  )
428
 
429
  return demo
430
 
431
- # Initialize the app
432
  if __name__ == "__main__":
433
- # Initialize default servers
434
- loop = asyncio.new_event_loop()
435
- asyncio.set_event_loop(loop)
436
- try:
437
- loop.run_until_complete(initialize_default_servers())
438
- finally:
439
- loop.close()
440
 
441
  # Create and launch the interface
442
  demo = create_interface()
443
- demo.launch()
 
 
 
 
 
2
  import asyncio
3
  import json
4
  import os
5
+ import base64
6
+ import io
7
+ import re
8
  from typing import List, Dict, Any, Optional
9
  from dataclasses import dataclass
10
  import aiohttp
 
 
11
  import anthropic
12
  from datetime import datetime
13
+ import logging
14
+ from PIL import Image
15
+ import requests
16
+
17
+ # Set up logging
18
+ logging.basicConfig(level=logging.INFO)
19
+ logger = logging.getLogger(__name__)
20
 
21
  # Configuration for MCP servers (Gradio apps acting as MCP servers)
22
  @dataclass
 
25
  url: str
26
  description: str
27
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  class MCPClient:
29
  def __init__(self):
30
  self.servers: Dict[str, MCPServerConfig] = {}
 
31
  self.available_tools: Dict[str, Dict] = {}
32
  self.anthropic_client = None
33
 
 
36
  self.anthropic_client = anthropic.Anthropic(
37
  api_key=os.getenv("ANTHROPIC_API_KEY")
38
  )
39
+ logger.info("✅ Anthropic client initialized")
40
+ else:
41
+ logger.warning("⚠️ ANTHROPIC_API_KEY not found")
42
 
43
+ async def add_server_async(self, config: MCPServerConfig) -> tuple[bool, str]:
44
+ """Add an MCP server to the client (async version)"""
45
  try:
46
  self.servers[config.name] = config
47
+
48
+ # Simple HTTP health check instead of full MCP connection during add
49
+ success, message = await self._test_server_connection(config)
50
+
51
+ if success:
52
+ logger.info(f"✅ Server {config.name} added successfully")
53
+ return True, f"✅ Successfully added server: {config.name}"
54
+ else:
55
+ logger.error(f"❌ Failed to add server {config.name}: {message}")
56
+ return False, f"❌ Failed to add server: {config.name} - {message}"
57
+
58
  except Exception as e:
59
+ error_msg = f"Failed to add server {config.name}: {str(e)}"
60
+ logger.error(error_msg)
61
+ return False, f"❌ {error_msg}"
62
 
63
+ async def _test_server_connection(self, config: MCPServerConfig) -> tuple[bool, str]:
64
+ """Test if server is reachable"""
65
  try:
66
+ # Ensure URL has correct MCP endpoint
67
+ if not config.url.endswith("/gradio_api/mcp/sse"):
68
+ base_url = config.url.rstrip("/")
69
+ config.url = f"{base_url}/gradio_api/mcp/sse"
70
 
71
+ # Simple HTTP check with timeout
72
+ timeout = aiohttp.ClientTimeout(total=10)
73
+ async with aiohttp.ClientSession(timeout=timeout) as session:
74
+ async with session.get(config.url.replace("/gradio_api/mcp/sse", "")) as response:
75
+ if response.status == 200:
76
+ return True, "Server is reachable"
77
+ else:
78
+ return False, f"Server returned status {response.status}"
79
+
80
+ except asyncio.TimeoutError:
81
+ return False, "Connection timeout"
 
 
 
 
 
 
 
 
 
 
82
  except Exception as e:
83
+ return False, f"Connection error: {str(e)}"
 
84
 
85
+ async def call_real_mcp_tool(self, server_name: str, tool_name: str, arguments: dict) -> tuple[bool, str, Optional[str]]:
86
+ """Call real MCP tool on connected server"""
 
 
 
 
 
 
 
 
 
 
 
87
  try:
88
+ if server_name not in self.servers:
89
+ return False, f"Server {server_name} not found", None
90
+
91
+ config = self.servers[server_name]
92
+
93
+ # Make HTTP request to the Gradio app's API
94
+ api_url = config.url.replace("/gradio_api/mcp/sse", "/api/predict")
95
+
96
+ timeout = aiohttp.ClientTimeout(total=30)
97
+ async with aiohttp.ClientSession(timeout=timeout) as session:
98
+ # Prepare the request payload for Gradio API
99
+ payload = {
100
+ "data": list(arguments.values()),
101
+ "fn_index": 0 # Usually the first (and often only) function
102
+ }
103
+
104
+ async with session.post(api_url, json=payload) as response:
105
+ if response.status == 200:
106
+ result = await response.json()
107
+
108
+ # Extract the result data
109
+ if "data" in result and result["data"]:
110
+ output = result["data"][0]
111
+
112
+ # Check if this looks like an image result
113
+ image_data = None
114
+ if isinstance(output, dict):
115
+ # Check for image in various formats
116
+ if "url" in output:
117
+ image_data = output["url"]
118
+ elif "path" in output:
119
+ # Convert local path to accessible URL
120
+ base_url = config.url.replace("/gradio_api/mcp/sse", "")
121
+ image_data = f"{base_url}/file={output['path']}"
122
+ elif "image" in str(output).lower():
123
+ image_data = str(output)
124
+ elif isinstance(output, str):
125
+ if output.startswith("data:image/") or output.startswith("http"):
126
+ image_data = output
127
+ elif "." in output and any(ext in output.lower() for ext in ['.png', '.jpg', '.jpeg', '.gif', '.webp']):
128
+ # Looks like a file path, convert to URL
129
+ base_url = config.url.replace("/gradio_api/mcp/sse", "")
130
+ image_data = f"{base_url}/file={output}"
131
+
132
+ return True, str(output), image_data
133
+ else:
134
+ return False, "No data in response", None
135
+ else:
136
+ return False, f"Server returned status {response.status}", None
137
+
138
  except Exception as e:
139
+ return False, f"Tool execution failed: {str(e)}", None
140
 
141
+ def get_server_status(self) -> Dict[str, str]:
142
  """Get status of all configured servers"""
143
  status = {}
144
  for name, config in self.servers.items():
145
+ status[name] = "✅ Connected" if name in self.servers else "❌ Disconnected"
 
 
 
146
  return status
147
 
148
  # Global MCP client instance
149
  mcp_client = MCPClient()
150
 
151
+ def chat_with_mcp(message: str, history: List[List[str]]) -> List[List[str]]:
152
+ """Main chat function that uses MCP tools (sync wrapper)"""
 
 
 
 
 
153
 
154
  if not mcp_client.anthropic_client:
155
  error_msg = "❌ Anthropic API key not configured. Please set ANTHROPIC_API_KEY environment variable."
 
157
  return history
158
 
159
  try:
160
+ # Get available servers
161
+ if not mcp_client.servers:
162
+ response = "⚠️ No MCP servers connected. Please add servers in the 'Server Management' tab."
 
 
163
  history.append([message, response])
164
  return history
165
 
166
+ # Prepare server information for Claude
167
+ servers_info = []
168
+ for name, config in mcp_client.servers.items():
169
+ servers_info.append({
170
+ "name": name,
171
+ "description": config.description,
172
+ "url": config.url
 
 
 
 
173
  })
174
 
175
  # Create messages for Claude
176
  messages = []
177
 
178
+ # Add conversation history (limit to last 10 exchanges to avoid token limits)
179
+ recent_history = history[-10:] if len(history) > 10 else history
180
+ for user_msg, assistant_msg in recent_history:
181
  messages.append({"role": "user", "content": user_msg})
182
  messages.append({"role": "assistant", "content": assistant_msg})
183
 
 
185
  messages.append({"role": "user", "content": message})
186
 
187
  # System prompt
188
+ system_prompt = f"""You are a helpful AI assistant with access to various tools through MCP (Model Context Protocol) servers.
189
 
190
+ Available MCP servers:
191
+ {json.dumps(servers_info, indent=2)}
192
 
193
+ When users request something that might be accomplished with these tools, I will attempt to call the real MCP servers. If a user asks for image generation, image processing, or any visual task, I will try to use the appropriate server.
194
+
195
+ For image-related requests, I can:
196
+ - Generate images from text descriptions
197
+ - Process existing images
198
+ - Apply filters or transformations
199
+ - Analyze image content
200
 
201
  Current time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
202
+
203
+ Be helpful and explain what tools you're using. When calling tools, I'll show you the actual results."""
204
 
205
+ # Call Claude
206
  response = mcp_client.anthropic_client.messages.create(
207
  model="claude-3-5-sonnet-20241022",
208
  max_tokens=1024,
209
  system=system_prompt,
210
+ messages=messages
 
 
 
 
 
211
  )
212
 
213
  assistant_response = ""
 
216
  for content in response.content:
217
  if content.type == "text":
218
  assistant_response += content.text
219
+
220
+ # Try to detect and execute tool calls based on user request
221
+ lower_message = message.lower()
222
+ tool_executed = False
223
+
224
+ # Check for image generation requests
225
+ if any(word in lower_message for word in ["generate", "create", "make"]) and any(word in lower_message for word in ["image", "picture", "photo", "drawing"]):
226
+ for server_name, config in mcp_client.servers.items():
227
+ if any(word in config.description.lower() for word in ["image", "generate", "draw", "create"]):
228
+ assistant_response += f"\n\n🔧 Using {server_name} to generate image..."
229
+
230
+ # Extract prompt from message
231
+ prompt = message
232
+ # Try to clean up the prompt
233
+ for remove_word in ["generate", "create", "make", "an", "image", "of", "picture", "photo"]:
234
+ prompt = re.sub(rf'\b{remove_word}\b', '', prompt, flags=re.IGNORECASE)
235
+ prompt = prompt.strip()
236
 
237
+ # Run async function in thread
238
+ def run_async_tool():
239
+ loop = asyncio.new_event_loop()
240
+ asyncio.set_event_loop(loop)
241
+ try:
242
+ return loop.run_until_complete(
243
+ mcp_client.call_real_mcp_tool(server_name, "generate", {"prompt": prompt})
244
+ )
245
+ finally:
246
+ loop.close()
247
 
248
+ try:
249
+ success, text_result, image_data = run_async_tool()
250
+ if success and image_data:
251
+ assistant_response += f"\n✅ Image generated successfully!"
252
+ assistant_response += f"\n🖼️ Image URL: {image_data}"
253
+ assistant_response += f"\n\n![Generated Image]({image_data})"
254
+ tool_executed = True
255
+ elif success:
256
+ assistant_response += f"\n✅ Tool executed: {text_result}"
257
+ tool_executed = True
258
+ else:
259
+ assistant_response += f"\n❌ Tool failed: {text_result}"
260
+ except Exception as e:
261
+ assistant_response += f"\n❌ Error calling tool: {str(e)}"
262
+ break
263
+
264
+ # Check for other tool requests (letter counting, math, etc.)
265
+ elif not tool_executed:
266
+ if any(word in lower_message for word in ["count", "letter"]):
267
+ for server_name, config in mcp_client.servers.items():
268
+ if "letter" in config.description.lower():
269
+ # Try to extract word and letter from message
270
+ words = message.split()
271
+ if len(words) >= 4:
272
+ for i, word in enumerate(words):
273
+ if word.lower() in ["count", "letter"] and i + 2 < len(words):
274
+ try:
275
+ letter = words[i + 1].strip("'\"")
276
+ target_word = words[i + 3].strip("'\"")
277
+
278
+ assistant_response += f"\n\n🔧 Using {server_name} to count letters..."
279
+
280
+ def run_async_tool():
281
+ loop = asyncio.new_event_loop()
282
+ asyncio.set_event_loop(loop)
283
+ try:
284
+ return loop.run_until_complete(
285
+ mcp_client.call_real_mcp_tool(server_name, "count", {"word": target_word, "letter": letter})
286
+ )
287
+ finally:
288
+ loop.close()
289
+
290
+ success, result, _ = run_async_tool()
291
+ if success:
292
+ assistant_response += f"\n✅ Result: {result}"
293
+ else:
294
+ assistant_response += f"\n❌ Tool failed: {result}"
295
+ tool_executed = True
296
+ break
297
+ except:
298
+ pass
299
+ break
300
+
301
+ elif any(word in lower_message for word in ["calculate", "math", "+", "-", "*", "/"]):
302
+ for server_name, config in mcp_client.servers.items():
303
+ if any(word in config.description.lower() for word in ["math", "calculate", "calculator"]):
304
+ # Try to find and evaluate expressions
305
+ math_expressions = re.findall(r'[\d+\-*/.\s()]+', message)
306
+ for expr in math_expressions:
307
+ if any(op in expr for op in ['+', '-', '*', '/']):
308
+ assistant_response += f"\n\n🔧 Using {server_name} to calculate..."
309
+
310
+ def run_async_tool():
311
+ loop = asyncio.new_event_loop()
312
+ asyncio.set_event_loop(loop)
313
+ try:
314
+ return loop.run_until_complete(
315
+ mcp_client.call_real_mcp_tool(server_name, "calculate", {"expression": expr.strip()})
316
+ )
317
+ finally:
318
+ loop.close()
319
+
320
+ success, result, _ = run_async_tool()
321
+ if success:
322
+ assistant_response += f"\n✅ Result: {result}"
323
+ else:
324
+ assistant_response += f"\n❌ Tool failed: {result}"
325
+ tool_executed = True
326
+ break
327
+ break
328
 
329
  if not assistant_response:
330
+ assistant_response = "I understand your request. Please ensure your MCP servers are properly connected and try again."
331
 
332
  history.append([message, assistant_response])
333
  return history
334
 
335
  except Exception as e:
336
  error_msg = f"❌ Error: {str(e)}"
337
+ logger.error(f"Chat error: {e}")
338
  history.append([message, error_msg])
339
  return history
340
 
341
+ def add_custom_server(name: str, url: str, description: str) -> str:
342
+ """Add a custom MCP server (sync wrapper)"""
343
  if not name or not url:
344
  return "❌ Please provide both name and URL"
345
 
346
+ # Clean up URL
347
+ url = url.strip()
348
+ if not url.startswith("http"):
349
+ return "❌ URL must start with http:// or https://"
350
 
351
+ config = MCPServerConfig(
352
+ name=name.strip(),
353
+ url=url,
354
+ description=description.strip() or "Custom server"
355
+ )
356
 
357
+ try:
358
+ # Run async function in thread to avoid blocking
359
+ def run_async():
360
+ loop = asyncio.new_event_loop()
361
+ asyncio.set_event_loop(loop)
362
+ try:
363
+ return loop.run_until_complete(mcp_client.add_server_async(config))
364
+ finally:
365
+ loop.close()
366
+
367
+ success, message = run_async()
368
+ return message
369
+
370
+ except Exception as e:
371
+ error_msg = f"❌ Failed to add server: {str(e)}"
372
+ logger.error(error_msg)
373
+ return error_msg
374
 
375
+ def get_server_status() -> str:
376
  """Get status of all servers"""
377
+ try:
378
+ status = mcp_client.get_server_status()
379
+ if not status:
380
+ return "No servers configured yet. Add servers using the form above."
381
+
382
+ status_text = "🔧 **MCP Server Status:**\n\n"
383
+
384
+ for name, state in status.items():
385
+ server_config = mcp_client.servers[name]
386
+ status_text += f"**{name}**: {state}\n"
387
+ status_text += f" - URL: {server_config.url}\n"
388
+ status_text += f" - Description: {server_config.description}\n\n"
389
+
390
+ status_text += f"**Total servers**: {len(status)}"
391
+ return status_text
392
+
393
+ except Exception as e:
394
+ return f"❌ Error getting status: {str(e)}"
395
 
396
+ # Create Gradio Interface
397
  def create_interface():
398
+ with gr.Blocks(title="MCP Chatbot Client", theme=gr.themes.Soft()) as demo:
399
  gr.Markdown("""
400
+ # 🤖 MCP Chatbot Client with Image Support
401
 
402
+ This chatbot connects to Gradio apps that function as MCP servers, including **image generation capabilities**!
403
 
404
+ **✨ New Features:**
405
+ - 🖼️ **Image Generation**: Connect to image-generating MCP servers
406
+ - 📱 **File Handling**: Proper support for image outputs from remote servers
407
+ - 🔗 **Real Tool Calls**: Actually calls your MCP servers (not just mock responses)
408
+
409
+ **Status**: Ready for real MCP servers - Add your servers to enable full functionality!
410
  """)
411
 
412
  with gr.Tabs():
413
  with gr.Tab("💬 Chat"):
414
  chatbot = gr.Chatbot(
415
+ label="MCP-Powered Chatbot with Image Support",
416
+ height=500,
417
  show_label=True
418
  )
419
 
420
  msg = gr.Textbox(
421
  label="Your message",
422
+ placeholder="Ask me to generate images, count letters, do math, or use any connected MCP tools...",
423
  lines=2
424
  )
425
 
 
427
  submit_btn = gr.Button("Send", variant="primary")
428
  clear_btn = gr.Button("Clear Chat", variant="secondary")
429
 
430
+ # Example prompts including image generation
431
  gr.Examples(
432
  examples=[
 
 
433
  "Generate an image of a sunset over mountains",
434
+ "Create a picture of a cute robot",
435
+ "Count the letter 'a' in the word 'banana'",
436
+ "Calculate sqrt(144) + 25",
437
+ "Analyze this text: Hello world!",
438
+ "What servers are connected?",
439
  ],
440
  inputs=msg,
441
  label="Try these examples:"
442
  )
443
+
444
+ gr.Markdown("""
445
+ ### 🎨 Image Generation Tips:
446
+ - Make sure you have an image-generating MCP server connected
447
+ - Use phrases like "generate an image of...", "create a picture of...", or "make an image showing..."
448
+ - The bot will automatically detect image requests and use the appropriate server
449
+ """)
450
 
451
  with gr.Tab("🔧 Server Management"):
452
  gr.Markdown("### Add Custom MCP Server")
453
 
454
  with gr.Row():
455
+ server_name = gr.Textbox(
456
+ label="Server Name",
457
+ placeholder="My Image Generator"
458
+ )
459
+ server_url = gr.Textbox(
460
+ label="Server URL",
461
+ placeholder="https://your-space.hf.space"
462
+ )
463
+
464
+ server_desc = gr.Textbox(
465
+ label="Description",
466
+ placeholder="Generates images from text prompts"
467
+ )
468
 
 
469
  add_server_btn = gr.Button("Add Server", variant="primary")
470
  add_server_output = gr.Textbox(label="Result", interactive=False)
471
 
472
+ gr.Markdown("### Pre-configured Example Servers")
473
+ gr.Markdown("""
474
+ Try adding these public MCP servers:
475
+
476
+ **Image Generators:**
477
+ - Name: `FLUX Image Generator`
478
+ - URL: `https://black-forest-labs-flux-1-schnell.hf.space`
479
+ - Description: `Generates images using FLUX.1 model`
480
+
481
+ **Text Tools:**
482
+ - Name: `Gradio MCP Tools`
483
+ - URL: `https://abidlabs-mcp-tools.hf.space`
484
+ - Description: `Official Gradio MCP demo tools`
485
+ """)
486
+
487
  gr.Markdown("### Server Status")
488
  status_btn = gr.Button("Check Status", variant="secondary")
489
  status_output = gr.Markdown()
490
 
491
+ with gr.Tab("ℹ️ Help & Setup"):
492
  gr.Markdown("""
493
+ ## 🚀 Image Generation Setup Guide
494
+
495
+ ### Step 1: Create an Image-Generating MCP Server
496
+
497
+ Create a new Hugging Face Space with this code:
498
+
499
+ ```python
500
+ import gradio as gr
501
+ import requests
502
+ import io
503
+ from PIL import Image
504
+
505
+ def generate_image(prompt: str) -> str:
506
+ '''Generate an image from a text prompt.
507
+
508
+ Args:
509
+ prompt: Text description of the image to generate
510
+
511
+ Returns:
512
+ URL or path to the generated image
513
+ '''
514
+ # This is a simple example - replace with your actual image generation
515
+ # For demo purposes, we'll create a colored rectangle
516
+ img = Image.new('RGB', (512, 512), color='lightblue')
517
+
518
+ # Save the image and return the path
519
+ img_path = "generated_image.png"
520
+ img.save(img_path)
521
+
522
+ return img_path
523
+
524
+ demo = gr.Interface(
525
+ fn=generate_image,
526
+ inputs=gr.Textbox(label="Prompt", placeholder="Describe the image you want"),
527
+ outputs=gr.Image(label="Generated Image"),
528
+ title="Image Generator MCP Server",
529
+ description="Generates images from text prompts"
530
+ )
531
+
532
+ if __name__ == "__main__":
533
+ demo.launch(mcp_server=True) # This enables MCP server functionality
534
+ ```
535
 
536
+ ### Step 2: Connect Real Image Generators
 
 
 
537
 
538
+ For actual image generation, connect to existing Spaces like:
539
+ - **FLUX.1**: `https://black-forest-labs-flux-1-schnell.hf.space`
540
+ - **Stable Diffusion**: Search for SD spaces on Hugging Face
541
+ - **DALL-E style**: Various community implementations
 
542
 
543
+ ### Step 3: Test Image Generation
544
+
545
+ Once connected, try prompts like:
546
+ - "Generate an image of a cat wearing a hat"
547
+ - "Create a landscape painting of mountains"
548
+ - "Make a futuristic city skyline"
549
+
550
+ ### 🔧 Advanced MCP Server Creation
551
+
552
+ For more advanced image generation:
553
 
554
  ```python
555
  import gradio as gr
556
+ from diffusers import StableDiffusionPipeline
557
+ import torch
558
+
559
+ # Load model once at startup
560
+ pipe = StableDiffusionPipeline.from_pretrained(
561
+ "runwayml/stable-diffusion-v1-5",
562
+ torch_dtype=torch.float16
563
+ ).to("cuda")
564
 
565
+ def generate_image(prompt: str, steps: int = 20) -> str:
566
+ '''Generate high-quality images using Stable Diffusion.
567
 
568
  Args:
569
+ prompt: Detailed text description
570
+ steps: Number of inference steps (higher = better quality)
571
 
572
  Returns:
573
+ Path to generated image
574
  '''
575
+ image = pipe(prompt, num_inference_steps=steps).images[0]
576
+ image_path = f"generated_{hash(prompt)}.png"
577
+ image.save(image_path)
578
+ return image_path
579
 
580
  demo = gr.Interface(
581
+ fn=generate_image,
582
+ inputs=[
583
+ gr.Textbox(label="Prompt"),
584
+ gr.Slider(10, 50, 20, label="Steps")
585
+ ],
586
+ outputs=gr.Image(label="Generated Image"),
587
+ title="Advanced Image Generator MCP Server"
588
  )
589
 
590
+ demo.launch(mcp_server=True)
591
  ```
592
 
593
+ ### 🐛 Troubleshooting Image Generation
594
+
595
+ **"Tool failed" errors:**
596
+ 1. Check that the MCP server is running
597
+ 2. Verify the server has `mcp_server=True` in launch()
598
+ 3. Ensure the server's function returns an image file path or URL
599
+
600
+ **Images not displaying:**
601
+ 1. Make sure the server returns accessible file paths
602
+ 2. Check that the Gradio app serves files properly
603
+ 3. Try using direct image URLs instead of local paths
604
 
605
+ **Performance issues:**
606
+ 1. Use GPU-enabled Spaces for faster generation
607
+ 2. Implement caching for repeated requests
608
+ 3. Consider using async functions for long-running tasks
609
+
610
+ ### 📚 Additional Resources
611
+
612
+ - [MCP Documentation](https://modelcontextprotocol.io/)
613
+ - [Gradio MCP Guide](https://www.gradio.app/guides/building-mcp-server-with-gradio)
614
+ - [Hugging Face Spaces](https://huggingface.co/docs/hub/spaces)
615
+ - [Image Generation Models on HF](https://huggingface.co/models?pipeline_tag=text-to-image)
616
  """)
617
 
618
  # Event handlers
619
  def submit_message(message, history):
620
  if message.strip():
621
+ return chat_with_mcp(message, history), ""
 
 
 
 
 
 
 
622
  return history, message
623
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
624
  # Connect event handlers
625
  submit_btn.click(
626
  submit_message,
 
634
  outputs=[chatbot, msg]
635
  )
636
 
637
+ clear_btn.click(
638
+ lambda: ([], ""),
639
+ outputs=[chatbot, msg]
640
+ )
641
 
642
  add_server_btn.click(
643
+ add_custom_server,
644
  inputs=[server_name, server_url, server_desc],
645
  outputs=[add_server_output]
646
  )
647
 
648
  status_btn.click(
649
+ get_server_status,
650
  outputs=[status_output]
651
  )
652
 
653
  return demo
654
 
655
+ # Initialize and launch the app
656
  if __name__ == "__main__":
657
+ logger.info("🚀 Starting MCP Chatbot Client with Image Support...")
 
 
 
 
 
 
658
 
659
  # Create and launch the interface
660
  demo = create_interface()
661
+
662
+ # Launch with proper configuration for HF Spaces
663
+ demo.launch()
664
+
665
+ logger.info("✅ MCP Chatbot Client with Image Support started successfully!")