#!/usr/bin/env python3
"""
Universal MCP Toolkit with Tiny-Agents LLM Integration
A comprehensive MCP client with embedded lightweight AI for server automation
"""

import asyncio
import json
import logging
from dataclasses import dataclass, asdict
from typing import Any, Dict, List, Optional, Tuple

import gradio as gr
import httpx
import websockets

# Configure detailed logging (file + console)
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('mcp_debug.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)


@dataclass
class MCPServer:
    """MCP Server configuration"""
    name: str
    endpoint: str
    protocol: str = "http"  # http, websocket, stdio
    auth_token: Optional[str] = None
    capabilities: Optional[List[str]] = None
    status: str = "disconnected"


@dataclass
class MCPMessage:
    """MCP Protocol message structure"""
    jsonrpc: str = "2.0"
    id: Optional[str] = None
    method: Optional[str] = None
    params: Optional[Dict[str, Any]] = None
    result: Optional[Any] = None
    error: Optional[Dict[str, Any]] = None


class TinyAgent:
    """Lightweight AI agent for technical task execution"""

    def __init__(self):
        self.system_prompt = """You are a tiny technical agent designed for server automation and MCP operations.
You excel at:
- Analyzing server responses and data
- Generating appropriate MCP commands
- Troubleshooting technical issues
- Providing concise, actionable recommendations
Always respond with practical, executable solutions. Be direct and technical."""

        # Simple rule-based keyword patterns for common intents
        self.patterns = {
            "list": ["show", "list", "get all", "enumerate"],
            "execute": ["run", "execute", "start", "launch"],
            "status": ["status", "health", "check", "info"],
            "config": ["config", "settings", "configure", "setup"],
            "debug": ["error", "debug", "troubleshoot", "fix", "issue"],
            "stop": ["stop", "kill", "terminate", "shutdown"],
            "restart": ["restart", "reload", "refresh"]
        }

    def analyze_command(self, user_input: str) -> Dict[str, Any]:
        """Analyze user input and suggest MCP actions"""
        user_lower = user_input.lower()

        suggestions = {
            "intent": "unknown",
            "mcp_method": None,
            "parameters": {},
            "explanation": ""
        }

        # Pattern matching for common intents
        for intent, keywords in self.patterns.items():
            if any(keyword in user_lower for keyword in keywords):
                suggestions["intent"] = intent
                break

        # Generate MCP method suggestions based on intent
        if suggestions["intent"] == "list":
            suggestions["mcp_method"] = "tools/list"
            suggestions["explanation"] = "Listing available tools/resources"
        elif suggestions["intent"] == "execute":
            suggestions["mcp_method"] = "tools/call"
            suggestions["explanation"] = "Executing a tool or command"
        elif suggestions["intent"] == "status":
            suggestions["mcp_method"] = "server/info"
            suggestions["explanation"] = "Getting server status information"

        return suggestions

    def process_response(self, mcp_response: Any) -> str:
        """Process MCP response and provide human-readable analysis"""
        if isinstance(mcp_response, dict):
            if "error" in mcp_response:
                return f"❌ Error: {mcp_response['error'].get('message', 'Unknown error')}"
            elif "result" in mcp_response:
                result = mcp_response["result"]
                if isinstance(result, list):
                    return f"✅ Found {len(result)} items:\n" + "\n".join([f"- {item}" for item in result[:5]])
                elif isinstance(result, dict):
                    return "✅ Response received:\n" + json.dumps(result, indent=2)[:500]
                else:
                    return f"✅ Result: {str(result)[:200]}"
        return f"📝 Raw response: {str(mcp_response)[:300]}"
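
# Illustrative sketch (not wired into the app): how TinyAgent maps free-form text
# to an MCP method suggestion, and what that request looks like once wrapped in an
# MCPMessage. The user text below is hypothetical; the exact `tools/call` parameter
# schema depends on the server you connect to.
def _demo_agent_and_message() -> str:
    agent = TinyAgent()
    suggestion = agent.analyze_command("list the available tools")
    # Expected shape: {"intent": "list", "mcp_method": "tools/list",
    #                  "parameters": {}, "explanation": "Listing available tools/resources"}
    message = MCPMessage(id="1", method=suggestion["mcp_method"], params={})
    # asdict() yields a JSON-RPC 2.0 envelope:
    # {"jsonrpc": "2.0", "id": "1", "method": "tools/list",
    #  "params": {}, "result": null, "error": null}
    return json.dumps({"suggestion": suggestion, "request": asdict(message)}, indent=2)
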
class MCPClient:
    """Universal MCP Protocol Client"""

    def __init__(self):
        self.servers: Dict[str, MCPServer] = {}
        self.connections: Dict[str, Any] = {}
        self.message_id_counter = 0

    def add_server(self, server: MCPServer) -> str:
        """Add a new MCP server configuration"""
        self.servers[server.name] = server
        return f"✅ Server '{server.name}' added successfully"

    def remove_server(self, server_name: str) -> str:
        """Remove MCP server configuration"""
        if server_name in self.servers:
            # Disconnect if connected
            if server_name in self.connections:
                try:
                    # Schedule an async disconnect if an event loop is running
                    asyncio.create_task(self.disconnect_server(server_name))
                except RuntimeError:
                    # Called from a synchronous context: just drop the stale reference
                    self.connections.pop(server_name, None)
            del self.servers[server_name]
            return f"✅ Server '{server_name}' removed"
        return f"❌ Server '{server_name}' not found"

    def get_next_message_id(self) -> str:
        """Generate unique message ID"""
        self.message_id_counter += 1
        return str(self.message_id_counter)

    async def connect_server(self, server_name: str) -> str:
        """Connect to MCP server"""
        if server_name not in self.servers:
            return f"❌ Server '{server_name}' not configured"

        server = self.servers[server_name]

        try:
            if server.protocol == "http":
                # HTTP connection test; keep the client open so later requests can reuse it
                client = httpx.AsyncClient()
                response = await client.get(f"{server.endpoint}/health", timeout=5.0)
                if response.status_code == 200:
                    self.connections[server_name] = client
                    server.status = "connected"
                    return f"✅ Connected to {server_name} via HTTP"
                await client.aclose()
                server.status = "error"
                return f"❌ Health check for {server_name} returned HTTP {response.status_code}"
            elif server.protocol == "websocket":
                # WebSocket connection
                connection = await websockets.connect(server.endpoint)
                self.connections[server_name] = connection
                server.status = "connected"
                return f"✅ Connected to {server_name} via WebSocket"
            else:
                return f"❌ Unsupported protocol: {server.protocol}"
        except Exception as e:
            server.status = "error"
            return f"❌ Failed to connect to {server_name}: {str(e)}"

    async def disconnect_server(self, server_name: str) -> str:
        """Disconnect from MCP server"""
        if server_name in self.connections:
            connection = self.connections[server_name]
            try:
                if hasattr(connection, 'aclose'):
                    await connection.aclose()  # httpx.AsyncClient
                elif hasattr(connection, 'close'):
                    await connection.close()   # websockets connection
                del self.connections[server_name]
                if server_name in self.servers:
                    self.servers[server_name].status = "disconnected"
                return f"✅ Disconnected from {server_name}"
            except Exception as e:
                return f"❌ Error disconnecting from {server_name}: {str(e)}"
        return f"ℹ️ {server_name} was not connected"

    async def send_mcp_request(self, server_name: str, method: str, params: Optional[Dict[str, Any]] = None) -> Any:
        """Send MCP request to server"""
        if server_name not in self.connections:
            raise Exception(f"Not connected to server '{server_name}'")

        message = MCPMessage(
            id=self.get_next_message_id(),
            method=method,
            params=params or {}
        )

        server = self.servers[server_name]
        connection = self.connections[server_name]

        try:
            if server.protocol == "http":
                # HTTP request, reusing the client stored at connect time
                response = await connection.post(
                    f"{server.endpoint}/mcp",
                    json=asdict(message),
                    headers={"Authorization": f"Bearer {server.auth_token}"} if server.auth_token else {},
                    timeout=10.0
                )
                return response.json()
            elif server.protocol == "websocket":
                # WebSocket request
                await connection.send(json.dumps(asdict(message)))
                response = await connection.recv()
                return json.loads(response)
            else:
                raise Exception(f"Unsupported protocol: {server.protocol}")
        except Exception as e:
            raise Exception(f"MCP request failed: {str(e)}")

    def list_servers(self) -> List[Dict[str, Any]]:
        """Get list of all configured servers"""
        return [
            {
                "name": name,
                "endpoint": server.endpoint,
                "protocol": server.protocol,
                "status": server.status,
                "capabilities": server.capabilities or []
            }
            for name, server in self.servers.items()
        ]


# Global instances
mcp_client = MCPClient()
tiny_agent = TinyAgent()


# Gradio Interface Functions
def format_server_list() -> str:
    """Format server list for display"""
    servers = mcp_client.list_servers()
    if not servers:
        return "No servers configured"

    output = "## 🖥️ MCP Servers\n\n"
    for server in servers:
        status_emoji = "🟢" if server["status"] == "connected" else "🔴" if server["status"] == "error" else "⚪"
        output += f"**{status_emoji} {server['name']}**\n"
        output += f"- Endpoint: `{server['endpoint']}`\n"
        output += f"- Protocol: `{server['protocol']}`\n"
        output += f"- Status: `{server['status']}`\n"
        if server['capabilities']:
            output += f"- Capabilities: {', '.join(server['capabilities'])}\n"
        output += "\n"

    return output


def add_server_config(name: str, endpoint: str, protocol: str, auth_token: str = "") -> Tuple[str, str]:
    """Add new server configuration"""
    if not name or not endpoint:
        return "❌ Name and endpoint are required", format_server_list()

    server = MCPServer(
        name=name,
        endpoint=endpoint,
        protocol=protocol,
        auth_token=auth_token if auth_token else None
    )

    result = mcp_client.add_server(server)
    return result, format_server_list()


async def connect_to_server(server_name: str) -> Tuple[str, str]:
    """Connect to selected server"""
    if not server_name:
        return "❌ Please select a server", format_server_list()

    result = await mcp_client.connect_server(server_name)
    return result, format_server_list()


async def disconnect_from_server(server_name: str) -> Tuple[str, str]:
    """Disconnect from selected server"""
    if not server_name:
        return "❌ Please select a server", format_server_list()

    result = await mcp_client.disconnect_server(server_name)
    return result, format_server_list()


def remove_server_config(server_name: str) -> Tuple[str, str]:
    """Remove server configuration"""
    if not server_name:
        return "❌ Please select a server", format_server_list()

    result = mcp_client.remove_server(server_name)
    return result, format_server_list()


async def execute_mcp_command(server_name: str, method: str, params_json: str) -> str:
    """Execute MCP command on selected server"""
    if not server_name:
        return "❌ Please select a server"
    if not method:
        return "❌ Please specify an MCP method"

    # Parse parameters
    try:
        params = json.loads(params_json) if params_json.strip() else {}
    except json.JSONDecodeError as e:
        return f"❌ Invalid JSON parameters: {str(e)}"

    try:
        response = await mcp_client.send_mcp_request(server_name, method, params)
        formatted_response = tiny_agent.process_response(response)

        # Also show the raw response
        raw_response = f"\n\n**Raw Response:**\n```json\n{json.dumps(response, indent=2)}\n```"

        return formatted_response + raw_response
    except Exception as e:
        return f"❌ Command execution failed: {str(e)}"


def analyze_user_input(user_message: str) -> str:
    """Analyze user input with tiny agent"""
    if not user_message.strip():
        return "Please enter a command or question."

    analysis = tiny_agent.analyze_command(user_message)

    output = "## 🤖 AI Analysis\n\n"
    output += f"**Intent Detected:** `{analysis['intent']}`\n"

    if analysis['mcp_method']:
        output += f"**Suggested MCP Method:** `{analysis['mcp_method']}`\n"
        output += f"**Explanation:** {analysis['explanation']}\n"

    if analysis['intent'] != "unknown":
        output += "\n**💡 Suggestion:** Try using the suggested MCP method with an appropriate server."
    return output


def chat_interface(message: str, history: List[List[str]]) -> Tuple[str, List[List[str]]]:
    """Main chat interface for MCP toolkit"""
    if not message.strip():
        return "", history

    # Add user message to history
    history.append([message, None])

    # Analyze message with tiny agent
    analysis = tiny_agent.analyze_command(message)

    if analysis['intent'] == "unknown":
        response = "I'm a technical MCP assistant. I can help you with:\n"
        response += "- Managing MCP server connections\n"
        response += "- Executing MCP commands\n"
        response += "- Analyzing server responses\n"
        response += "- Troubleshooting technical issues\n\n"
        response += "Try commands like: 'list tools', 'check server status', 'execute command'"
    else:
        response = f"Intent: {analysis['intent']}\n"
        if analysis['mcp_method']:
            response += f"Suggested MCP method: {analysis['mcp_method']}\n"
            response += f"Explanation: {analysis['explanation']}\n\n"
        response += "Use the MCP Commands tab to execute this action on a connected server."

    # Update history with response
    history[-1][1] = response
    return "", history


# Build Gradio Interface
def create_interface():
    """Create the main Gradio interface"""
    with gr.Blocks(
        title="Universal MCP Toolkit",
        theme=gr.themes.Soft(),
        css="""
        .gradio-container { max-width: 1200px !important; }
        .server-status {
            font-family: monospace;
            background: #f5f5f5;
            padding: 10px;
            border-radius: 5px;
            margin: 10px 0;
        }
        """
    ) as interface:

        gr.Markdown("""
        # 🔧 Universal MCP Toolkit
        ### Model Context Protocol Client with Tiny-Agents AI

        Connect to MCP servers, execute commands, and automate your infrastructure with AI assistance.
        """)

        with gr.Tabs():
            # Chat Interface Tab
            with gr.Tab("💬 AI Assistant"):
                gr.Markdown("Chat with the AI assistant for MCP guidance and automation help.")

                chatbot = gr.Chatbot(
                    height=400,
                    label="MCP Assistant",
                    placeholder="Ask me about MCP operations, server management, or technical automation..."
                )
                chat_input = gr.Textbox(
                    placeholder="Type your message here...",
                    label="Message",
                    lines=2
                )

                chat_input.submit(
                    chat_interface,
                    inputs=[chat_input, chatbot],
                    outputs=[chat_input, chatbot]
                )

            # Server Management Tab
            with gr.Tab("🖥️ Server Management"):
                gr.Markdown("Configure and manage MCP server connections.")

                with gr.Row():
                    with gr.Column(scale=1):
                        gr.Markdown("### Add New Server")
                        server_name_input = gr.Textbox(
                            label="Server Name",
                            placeholder="my-server"
                        )
                        server_endpoint_input = gr.Textbox(
                            label="Endpoint URL",
                            placeholder="http://localhost:8080"
                        )
                        server_protocol_input = gr.Dropdown(
                            choices=["http", "websocket", "stdio"],
                            value="http",
                            label="Protocol"
                        )
                        server_auth_input = gr.Textbox(
                            label="Auth Token (optional)",
                            placeholder="Bearer token...",
                            type="password"
                        )
                        add_server_btn = gr.Button("Add Server", variant="primary")
                        add_server_output = gr.Textbox(label="Result", lines=2)

                    with gr.Column(scale=2):
                        gr.Markdown("### Server Status")
                        server_list_display = gr.Markdown(
                            format_server_list(),
                            elem_classes=["server-status"]
                        )

                        with gr.Row():
                            server_selector = gr.Dropdown(
                                choices=[],
                                label="Select Server",
                                interactive=True
                            )
                            connect_btn = gr.Button("Connect", variant="secondary")
                            disconnect_btn = gr.Button("Disconnect", variant="secondary")
                            remove_btn = gr.Button("Remove", variant="stop")

                        server_action_output = gr.Textbox(label="Action Result", lines=2)

                # Event handlers (refresh the dropdown choices, not just its value)
                add_server_btn.click(
                    add_server_config,
                    inputs=[server_name_input, server_endpoint_input, server_protocol_input, server_auth_input],
                    outputs=[add_server_output, server_list_display]
                ).then(
                    lambda: gr.update(choices=[s["name"] for s in mcp_client.list_servers()]),
                    outputs=[server_selector]
                )

                connect_btn.click(
                    connect_to_server,
                    inputs=[server_selector],
                    outputs=[server_action_output, server_list_display]
                )

                disconnect_btn.click(
                    disconnect_from_server,
                    inputs=[server_selector],
                    outputs=[server_action_output, server_list_display]
                )

                remove_btn.click(
                    remove_server_config,
                    inputs=[server_selector],
                    outputs=[server_action_output, server_list_display]
                ).then(
                    lambda: gr.update(choices=[s["name"] for s in mcp_client.list_servers()]),
                    outputs=[server_selector]
                )

            # MCP Commands Tab
            with gr.Tab("⚡ MCP Commands"):
                gr.Markdown("Execute MCP protocol commands on connected servers.")

                with gr.Row():
                    with gr.Column():
                        command_server_selector = gr.Dropdown(
                            choices=[],
                            label="Target Server",
                            interactive=True
                        )
                        mcp_method_input = gr.Textbox(
                            label="MCP Method",
                            placeholder="tools/list",
                            value="tools/list"
                        )
                        mcp_params_input = gr.Textbox(
                            label="Parameters (JSON)",
                            placeholder='{"arg1": "value1"}',
                            lines=3
                        )
                        execute_btn = gr.Button("Execute Command", variant="primary")

                    with gr.Column():
                        gr.Markdown("### Common MCP Methods:")
                        gr.Markdown("""
                        - `tools/list` - List available tools
                        - `tools/call` - Execute a tool
                        - `resources/list` - List resources
                        - `resources/read` - Read resource content
                        - `prompts/list` - List available prompts
                        - `server/info` - Get server information
                        """)

                command_output = gr.Textbox(
                    label="Command Output",
                    lines=15,
                    max_lines=20,
                    interactive=False
                )

                execute_btn.click(
                    execute_mcp_command,
                    inputs=[command_server_selector, mcp_method_input, mcp_params_input],
                    outputs=[command_output]
                )

            # AI Analysis Tab
            with gr.Tab("🤖 AI Analysis"):
                gr.Markdown("Get AI-powered analysis and recommendations for your commands.")

                analysis_input = gr.Textbox(
                    label="Describe what you want to do",
                    placeholder="List all running processes on the server",
                    lines=3
                )
                analyze_btn = gr.Button("Analyze", variant="primary")
                analysis_output = gr.Markdown(
                    label="AI Analysis",
                    value="Enter a command or question above for AI analysis."
                )
                analyze_btn.click(
                    analyze_user_input,
                    inputs=[analysis_input],
                    outputs=[analysis_output]
                )

            # Documentation Tab
            with gr.Tab("📚 Documentation"):
                gr.Markdown("""
                ## Universal MCP Toolkit Documentation

                ### Overview
                This toolkit provides a comprehensive interface for interacting with Model Context Protocol (MCP) servers.
                It includes an AI assistant powered by tiny-agents for automated analysis and command generation.

                ### Features
                - **Multi-protocol support**: HTTP and WebSocket connections (stdio planned)
                - **AI-powered analysis**: Intelligent command interpretation and suggestions
                - **Real-time server management**: Connect, disconnect, and monitor MCP servers
                - **Command execution**: Direct MCP protocol command execution
                - **Response analysis**: AI-powered interpretation of server responses

                ### Getting Started
                1. **Add Servers**: Configure your MCP servers in the Server Management tab
                2. **Connect**: Establish connections to your servers
                3. **Execute Commands**: Use the MCP Commands tab to interact with servers
                4. **Get AI Help**: Use the AI Assistant for guidance and automation

                ### MCP Protocol
                The Model Context Protocol enables secure, standardized communication between AI systems and external resources.
                Common operations include:
                - Tool execution
                - Resource access
                - Prompt management
                - Server introspection

                ### Security
                - Use authentication tokens for secure connections
                - Validate all server endpoints before connecting
                - Monitor connection status and error logs

                ### Troubleshooting
                - Check server status in the Server Management tab
                - Verify endpoint URLs and authentication
                - Use the AI Assistant for diagnostic help
                - Check the console for detailed error logs
                """)

        # Auto-refresh server list in both dropdowns on page load
        interface.load(
            lambda: (
                gr.update(choices=[s["name"] for s in mcp_client.list_servers()]),
                gr.update(choices=[s["name"] for s in mcp_client.list_servers()]),
            ),
            outputs=[server_selector, command_server_selector]
        )

    return interface


# Main application
if __name__ == "__main__":
    # Add some example servers for demonstration (before building the UI so the
    # initial server list and dropdowns reflect them)
    example_servers = [
        MCPServer(
            name="local-demo",
            endpoint="http://localhost:8080",
            protocol="http",
            capabilities=["tools", "resources"]
        ),
        MCPServer(
            name="websocket-demo",
            endpoint="ws://localhost:8081/mcp",
            protocol="websocket",
            capabilities=["tools", "prompts"]
        )
    ]

    for server in example_servers:
        mcp_client.add_server(server)

    # Create and launch the interface
    app = create_interface()
    app.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        inbrowser=True,
        show_error=True
    )
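
# ---------------------------------------------------------------------------
# Hedged sketch of the HTTP contract this client assumes, for reference only:
# connect_server() probes GET {endpoint}/health and send_mcp_request() POSTs the
# JSON-RPC body to {endpoint}/mcp. A minimal stand-in server for local testing
# could look like the following (FastAPI is an assumption here, not a dependency
# of this toolkit, so the sketch is left commented out):
#
#   from fastapi import FastAPI, Request
#
#   stub = FastAPI()
#
#   @stub.get("/health")
#   async def health():
#       return {"status": "ok"}
#
#   @stub.post("/mcp")
#   async def mcp_endpoint(request: Request):
#       body = await request.json()
#       # Echo an empty result for any method; a real server would dispatch on body["method"]
#       return {"jsonrpc": "2.0", "id": body.get("id"), "result": []}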