kailash 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +3 -3
- kailash/api/custom_nodes_secure.py +3 -3
- kailash/api/gateway.py +1 -1
- kailash/api/studio.py +2 -3
- kailash/api/workflow_api.py +3 -4
- kailash/core/resilience/bulkhead.py +460 -0
- kailash/core/resilience/circuit_breaker.py +92 -10
- kailash/edge/discovery.py +86 -0
- kailash/mcp_server/__init__.py +334 -0
- kailash/mcp_server/advanced_features.py +1022 -0
- kailash/{mcp → mcp_server}/ai_registry_server.py +29 -4
- kailash/mcp_server/auth.py +789 -0
- kailash/mcp_server/client.py +712 -0
- kailash/mcp_server/discovery.py +1593 -0
- kailash/mcp_server/errors.py +673 -0
- kailash/mcp_server/oauth.py +1727 -0
- kailash/mcp_server/protocol.py +1126 -0
- kailash/mcp_server/registry_integration.py +587 -0
- kailash/mcp_server/server.py +1747 -0
- kailash/{mcp → mcp_server}/servers/ai_registry.py +2 -2
- kailash/mcp_server/transports.py +1169 -0
- kailash/mcp_server/utils/cache.py +510 -0
- kailash/middleware/auth/auth_manager.py +3 -3
- kailash/middleware/communication/api_gateway.py +2 -9
- kailash/middleware/communication/realtime.py +1 -1
- kailash/middleware/mcp/client_integration.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +2 -2
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/admin/audit_log.py +6 -6
- kailash/nodes/admin/permission_check.py +8 -8
- kailash/nodes/admin/role_management.py +32 -28
- kailash/nodes/admin/schema.sql +6 -1
- kailash/nodes/admin/schema_manager.py +13 -13
- kailash/nodes/admin/security_event.py +16 -20
- kailash/nodes/admin/tenant_isolation.py +3 -3
- kailash/nodes/admin/transaction_utils.py +3 -3
- kailash/nodes/admin/user_management.py +21 -22
- kailash/nodes/ai/a2a.py +11 -11
- kailash/nodes/ai/ai_providers.py +9 -12
- kailash/nodes/ai/embedding_generator.py +13 -14
- kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
- kailash/nodes/ai/iterative_llm_agent.py +3 -3
- kailash/nodes/ai/llm_agent.py +213 -36
- kailash/nodes/ai/self_organizing.py +2 -2
- kailash/nodes/alerts/discord.py +4 -4
- kailash/nodes/api/graphql.py +6 -6
- kailash/nodes/api/http.py +12 -17
- kailash/nodes/api/rate_limiting.py +4 -4
- kailash/nodes/api/rest.py +15 -15
- kailash/nodes/auth/mfa.py +3 -4
- kailash/nodes/auth/risk_assessment.py +2 -2
- kailash/nodes/auth/session_management.py +5 -5
- kailash/nodes/auth/sso.py +143 -0
- kailash/nodes/base.py +6 -2
- kailash/nodes/base_async.py +16 -2
- kailash/nodes/base_with_acl.py +2 -2
- kailash/nodes/cache/__init__.py +9 -0
- kailash/nodes/cache/cache.py +1172 -0
- kailash/nodes/cache/cache_invalidation.py +870 -0
- kailash/nodes/cache/redis_pool_manager.py +595 -0
- kailash/nodes/code/async_python.py +2 -1
- kailash/nodes/code/python.py +196 -35
- kailash/nodes/compliance/data_retention.py +6 -6
- kailash/nodes/compliance/gdpr.py +5 -5
- kailash/nodes/data/__init__.py +10 -0
- kailash/nodes/data/optimistic_locking.py +906 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/redis.py +349 -0
- kailash/nodes/data/sql.py +314 -3
- kailash/nodes/data/streaming.py +21 -0
- kailash/nodes/enterprise/__init__.py +8 -0
- kailash/nodes/enterprise/audit_logger.py +285 -0
- kailash/nodes/enterprise/batch_processor.py +22 -3
- kailash/nodes/enterprise/data_lineage.py +1 -1
- kailash/nodes/enterprise/mcp_executor.py +205 -0
- kailash/nodes/enterprise/service_discovery.py +150 -0
- kailash/nodes/enterprise/tenant_assignment.py +108 -0
- kailash/nodes/logic/async_operations.py +2 -2
- kailash/nodes/logic/convergence.py +1 -1
- kailash/nodes/logic/operations.py +1 -1
- kailash/nodes/monitoring/__init__.py +11 -1
- kailash/nodes/monitoring/health_check.py +456 -0
- kailash/nodes/monitoring/log_processor.py +817 -0
- kailash/nodes/monitoring/metrics_collector.py +627 -0
- kailash/nodes/monitoring/performance_benchmark.py +137 -11
- kailash/nodes/rag/advanced.py +7 -7
- kailash/nodes/rag/agentic.py +49 -2
- kailash/nodes/rag/conversational.py +3 -3
- kailash/nodes/rag/evaluation.py +3 -3
- kailash/nodes/rag/federated.py +3 -3
- kailash/nodes/rag/graph.py +3 -3
- kailash/nodes/rag/multimodal.py +3 -3
- kailash/nodes/rag/optimized.py +5 -5
- kailash/nodes/rag/privacy.py +3 -3
- kailash/nodes/rag/query_processing.py +6 -6
- kailash/nodes/rag/realtime.py +1 -1
- kailash/nodes/rag/registry.py +2 -6
- kailash/nodes/rag/router.py +1 -1
- kailash/nodes/rag/similarity.py +7 -7
- kailash/nodes/rag/strategies.py +4 -4
- kailash/nodes/security/abac_evaluator.py +6 -6
- kailash/nodes/security/behavior_analysis.py +5 -6
- kailash/nodes/security/credential_manager.py +1 -1
- kailash/nodes/security/rotating_credentials.py +11 -11
- kailash/nodes/security/threat_detection.py +8 -8
- kailash/nodes/testing/credential_testing.py +2 -2
- kailash/nodes/transform/processors.py +5 -5
- kailash/runtime/local.py +162 -14
- kailash/runtime/parameter_injection.py +425 -0
- kailash/runtime/parameter_injector.py +657 -0
- kailash/runtime/testing.py +2 -2
- kailash/testing/fixtures.py +2 -2
- kailash/workflow/builder.py +99 -18
- kailash/workflow/builder_improvements.py +207 -0
- kailash/workflow/input_handling.py +170 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/METADATA +21 -8
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/RECORD +126 -101
- kailash/mcp/__init__.py +0 -53
- kailash/mcp/client.py +0 -445
- kailash/mcp/server.py +0 -292
- kailash/mcp/server_enhanced.py +0 -449
- kailash/mcp/utils/cache.py +0 -267
- /kailash/{mcp → mcp_server}/client_new.py +0 -0
- /kailash/{mcp → mcp_server}/utils/__init__.py +0 -0
- /kailash/{mcp → mcp_server}/utils/config.py +0 -0
- /kailash/{mcp → mcp_server}/utils/formatters.py +0 -0
- /kailash/{mcp → mcp_server}/utils/metrics.py +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/WHEEL +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.2.dist-info → kailash-0.6.4.dist-info}/top_level.txt +0 -0
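Note on the `kailash/{mcp → mcp_server}` rename shown above: every module under the old `kailash.mcp` package is either moved to `kailash.mcp_server` or removed in 0.6.4, so downstream imports need to follow the new path. A minimal migration sketch, assuming the exported class name `MCPServer` is the same on both sides of the rename (the old-path import is an assumption, not verified against 0.6.2):

    # Hypothetical import shim for kailash 0.6.2 -> 0.6.4 (old symbol name assumed)
    try:
        from kailash.mcp_server import MCPServer  # 0.6.4+ package layout
    except ImportError:
        from kailash.mcp import MCPServer  # pre-0.6.4 layout

    server = MCPServer("my-server", enable_cache=True)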
kailash/mcp_server/server.py (new file)
@@ -0,0 +1,1747 @@
"""
Enhanced MCP Server Framework with production-ready capabilities.

This module provides both basic and enhanced MCP server implementations using
the official FastMCP framework from Anthropic. Servers run as long-lived
services that expose tools, resources, and prompts to MCP clients.

Enhanced Features:
- Multiple transport support (STDIO, SSE, HTTP)
- Authentication and authorization
- Rate limiting and circuit breaker patterns
- Metrics collection and monitoring
- Error handling with structured codes
- Service discovery integration
- Resource streaming
- Connection pooling
- Caching with TTL support

Basic Usage:
    Abstract base class for custom servers:

    >>> class MyServer(MCPServerBase):
    ...     def setup(self):
    ...         @self.add_tool()
    ...         def calculate(a: int, b: int) -> int:
    ...             return a + b
    >>> server = MyServer("calculator")
    >>> server.start()

Production Usage:
    Main server with all production features:

    >>> from kailash.mcp_server import MCPServer
    >>> server = MCPServer("my-server", enable_cache=True)
    >>> @server.tool(cache_key="search", cache_ttl=600)
    ... def search(query: str) -> dict:
    ...     return {"results": f"Found data for {query}"}
    >>> server.run()

Enhanced Production Usage:
    Server with authentication and monitoring:

    >>> from kailash.mcp_server.auth import APIKeyAuth
    >>> auth = APIKeyAuth({"user1": "secret-key"})
    >>> server = MCPServer(
    ...     "my-server",
    ...     auth_provider=auth,
    ...     enable_metrics=True,
    ...     enable_http_transport=True,
    ...     rate_limit_config={"requests_per_minute": 100}
    ... )
    >>> server.run()
"""

import asyncio
import functools
import logging
import time
import uuid
from abc import ABC, abstractmethod
from collections.abc import Callable
from pathlib import Path
from typing import Any, Dict, List, Optional, TypeVar, Union

from .auth import AuthManager, AuthProvider, PermissionManager, RateLimiter
from .errors import (
    AuthenticationError,
    AuthorizationError,
    ErrorAggregator,
    MCPError,
    MCPErrorCode,
    RateLimitError,
    ResourceError,
    RetryableOperation,
    ToolError,
)
from .utils import CacheManager, ConfigManager, MetricsCollector, format_response

logger = logging.getLogger(__name__)

F = TypeVar("F", bound=Callable[..., Any])


class MCPServerBase(ABC):
    """Base class for MCP servers using FastMCP.

    This provides a framework for creating MCP servers that expose
    tools, resources, and prompts via the Model Context Protocol.

    Examples:
        Creating a custom server:

        >>> class MyServer(MCPServerBase):
        ...     def setup(self):
        ...         @self.add_tool()
        ...         def search(query: str) -> str:
        ...             return f"Results for: {query}"
        ...         @self.add_resource("data://example")
        ...         def get_example():
        ...             return "Example data"
        >>> server = MyServer("my-server", port=8080)
        >>> server.start()  # Runs until stopped
    """

    def __init__(self, name: str, port: int = 8080, host: str = "localhost"):
        """Initialize the MCP server.

        Args:
            name: Name of the server.
            port: Port to listen on (default: 8080).
            host: Host to bind to (default: "localhost").
        """
        self.name = name
        self.port = port
        self.host = host
        self._mcp = None
        self._running = False

    @abstractmethod
    def setup(self):
        """Setup server tools, resources, and prompts.

        This method should be implemented by subclasses to define
        the server's capabilities using decorators.

        Note:
            Use @self.add_tool(), @self.add_resource(uri), and
            @self.add_prompt(name) decorators to register capabilities.
        """

    def add_tool(self):
        """Decorator to add a tool to the server.

        Returns:
            Function decorator for registering tools.

        Examples:
            >>> @server.add_tool()
            ... def calculate(a: int, b: int) -> int:
            ...     '''Add two numbers'''
            ...     return a + b
        """

        def decorator(func: Callable):
            if self._mcp is None:
                self._init_mcp()

            # Use FastMCP's tool decorator
            return self._mcp.tool()(func)

        return decorator

    def add_resource(self, uri: str):
        """Decorator to add a resource to the server.

        Args:
            uri: URI pattern for the resource (supports wildcards).

        Returns:
            Function decorator for registering resources.

        Examples:
            >>> @server.add_resource("file:///data/*")
            ... def get_file(path: str) -> str:
            ...     return f"Content of {path}"
        """

        def decorator(func: Callable):
            if self._mcp is None:
                self._init_mcp()

            # Use FastMCP's resource decorator
            return self._mcp.resource(uri)(func)

        return decorator

    def add_prompt(self, name: str):
        """Decorator to add a prompt template to the server.

        Args:
            name: Name of the prompt.

        Returns:
            Function decorator for registering prompts.

        Examples:
            >>> @server.add_prompt("analyze")
            ... def analyze_prompt(data: str) -> str:
            ...     return f"Please analyze the following data: {data}"
        """

        def decorator(func: Callable):
            if self._mcp is None:
                self._init_mcp()

            # Use FastMCP's prompt decorator
            return self._mcp.prompt(name)(func)

        return decorator

    def _init_mcp(self):
        """Initialize the FastMCP instance."""
        try:
            # Try independent FastMCP package first (when available)
            from fastmcp import FastMCP
            self._mcp = FastMCP(self.name)
        except ImportError:
            logger.warning("FastMCP not available, using fallback mode")
            # Use same fallback as MCPServer
            self._mcp = self._create_fallback_server()

    def _create_fallback_server(self):
        """Create a fallback server when FastMCP is not available."""
        class FallbackMCPServer:
            def __init__(self, name: str):
                self.name = name
                self._tools = {}
                self._resources = {}
                self._prompts = {}

            def tool(self, *args, **kwargs):
                def decorator(func):
                    self._tools[func.__name__] = func
                    return func
                return decorator

            def resource(self, uri):
                def decorator(func):
                    self._resources[uri] = func
                    return func
                return decorator

            def prompt(self, name):
                def decorator(func):
                    self._prompts[name] = func
                    return func
                return decorator

            def run(self, **kwargs):
                raise NotImplementedError("FastMCP not available")

        return FallbackMCPServer(self.name)

    def start(self):
        """Start the MCP server.

        This runs the server as a long-lived process until stopped.

        Raises:
            ImportError: If FastMCP is not available.
            Exception: If server fails to start.
        """
        if self._mcp is None:
            self._init_mcp()

        # Run setup to register tools/resources
        self.setup()

        logger.info(f"Starting MCP server '{self.name}' on {self.host}:{self.port}")
        self._running = True

        try:
            # Run the FastMCP server
            logger.info("Running FastMCP server in stdio mode")
            self._mcp.run()
        except Exception as e:
            logger.error(f"Failed to start server: {e}")
            raise
        finally:
            self._running = False

    def stop(self):
        """Stop the MCP server."""
        logger.info(f"Stopping MCP server '{self.name}'")
        self._running = False
        # In a real implementation, we'd need to handle graceful shutdown


class MCPServer:
    """
    Kailash MCP Server - Node-based Model Context Protocol server.

    This MCP server follows Kailash philosophy by integrating with the node
    and workflow system. Tools can be implemented as nodes, and complex
    MCP capabilities can be built using workflows.

    Core Features:
    - Node-based tool implementation using Kailash nodes
    - Workflow-based complex operations
    - Production-ready with authentication, caching, and monitoring
    - Multiple transport support (STDIO, SSE, HTTP)
    - Integration with Kailash runtime and infrastructure

    Kailash Philosophy Integration:
        Using nodes as MCP tools:
        >>> from kailash.mcp_server import MCPServer
        >>> from kailash.nodes import PythonCodeNode
        >>>
        >>> server = MCPServer("my-server")
        >>>
        >>> # Register a node as an MCP tool
        >>> @server.node_tool(PythonCodeNode)
        ... def calculate(a: int, b: int) -> int:
        ...     return a + b
        >>>
        >>> server.run()

        Using workflows as MCP tools:
        >>> from kailash.workflows import WorkflowBuilder
        >>>
        >>> # Create workflow for complex MCP operation
        >>> workflow = WorkflowBuilder()
        >>> workflow.add_node("csv_reader", "CSVReaderNode", {"file_path": "data.csv"})
        >>> workflow.add_node("processor", "PythonCodeNode", {"code": "process_data"})
        >>> workflow.add_connection("csv_reader", "processor", "data", "input_data")
        >>>
        >>> server.register_workflow_tool("process_csv", workflow)

        Traditional usage (for compatibility):
        >>> server = MCPServer("my-server", enable_cache=True, enable_metrics=True)
        >>> @server.tool(cache_key="search", cache_ttl=600)
        ... def search(query: str) -> dict:
        ...     return {"results": f"Found: {query}"}
        >>> server.run()

        With authentication and advanced features:
        >>> from kailash.mcp_server.auth import APIKeyAuth
        >>> auth = APIKeyAuth({"user1": "secret-key"})
        >>> server = MCPServer(
        ...     "my-server",
        ...     auth_provider=auth,
        ...     enable_http_transport=True,
        ...     rate_limit_config={"requests_per_minute": 100},
        ...     circuit_breaker_config={"failure_threshold": 5}
        ... )
        >>> server.run()
    """

    def __init__(
        self,
        name: str,
        config_file: Optional[Union[str, Path]] = None,
        enable_cache: bool = True,
        cache_ttl: int = 300,
        cache_backend: str = "memory",  # "memory" or "redis"
        cache_config: Optional[Dict[str, Any]] = None,
        enable_metrics: bool = True,
        enable_formatting: bool = True,
        enable_monitoring: bool = False,  # Health checks, alerts, observability
        # Enhanced features (optional for backward compatibility)
        auth_provider: Optional[AuthProvider] = None,
        enable_http_transport: bool = False,
        enable_sse_transport: bool = False,
        rate_limit_config: Optional[Dict[str, Any]] = None,
        circuit_breaker_config: Optional[Dict[str, Any]] = None,
        enable_discovery: bool = False,
        connection_pool_config: Optional[Dict[str, Any]] = None,
        error_aggregation: bool = True,
        transport_timeout: float = 30.0,
        max_request_size: int = 10_000_000,  # 10MB
        enable_streaming: bool = False,
    ):
        """
        Initialize enhanced MCP server.

        Args:
            name: Server name
            config_file: Optional configuration file path
            enable_cache: Whether to enable caching (default: True)
            cache_ttl: Default cache TTL in seconds (default: 300)
            cache_backend: Cache backend ("memory" or "redis")
            cache_config: Cache configuration (for Redis: {"redis_url": "redis://...", "prefix": "mcp:"})
            enable_metrics: Whether to enable metrics collection (default: True)
            enable_formatting: Whether to enable response formatting (default: True)
            auth_provider: Optional authentication provider
            enable_http_transport: Enable HTTP transport support
            enable_sse_transport: Enable SSE transport support
            rate_limit_config: Rate limiting configuration
            circuit_breaker_config: Circuit breaker configuration
            enable_discovery: Enable service discovery
            connection_pool_config: Connection pooling configuration
            error_aggregation: Enable error aggregation
            transport_timeout: Transport timeout in seconds
            max_request_size: Maximum request size in bytes
            enable_streaming: Enable streaming support
        """
        self.name = name

        # Enhanced features
        self.auth_provider = auth_provider
        self.enable_http_transport = enable_http_transport
        self.enable_sse_transport = enable_sse_transport
        self.enable_discovery = enable_discovery
        self.enable_streaming = enable_streaming
        self.enable_monitoring = enable_monitoring
        self.transport_timeout = transport_timeout
        self.max_request_size = max_request_size

        # Initialize configuration
        self.config = ConfigManager(config_file)

        # Set default configuration values including enhanced features
        self.config.update(
            {
                "server": {
                    "name": name,
                    "version": "1.0.0",
                    "transport": "stdio",
                    "enable_http": enable_http_transport,
                    "enable_sse": enable_sse_transport,
                    "timeout": transport_timeout,
                    "max_request_size": max_request_size,
                    "enable_streaming": enable_streaming,
                },
                "cache": {
                    "enabled": enable_cache,
                    "default_ttl": cache_ttl,
                    "max_size": 128,
                    "backend": cache_backend,
                    "config": cache_config or {},
                },
                "metrics": {
                    "enabled": enable_metrics,
                    "collect_performance": True,
                    "collect_usage": True,
                },
                "formatting": {
                    "enabled": enable_formatting,
                    "default_format": "markdown",
                },
                "monitoring": {
                    "enabled": enable_monitoring,
                    "health_checks": enable_monitoring,
                    "observability": enable_monitoring,
                },
                "auth": {
                    "enabled": auth_provider is not None,
                    "provider_type": (
                        type(auth_provider).__name__ if auth_provider else None
                    ),
                },
                "rate_limiting": rate_limit_config or {},
                "circuit_breaker": circuit_breaker_config or {},
                "discovery": {"enabled": enable_discovery},
                "connection_pool": connection_pool_config or {},
            }
        )

        # Initialize authentication manager
        if auth_provider:
            self.auth_manager = AuthManager(
                provider=auth_provider,
                permission_manager=PermissionManager(),
                rate_limiter=RateLimiter(**(rate_limit_config or {})),
            )
        else:
            self.auth_manager = None

        # Initialize components
        self.cache = CacheManager(
            enabled=self.config.get("cache.enabled", enable_cache),
            default_ttl=self.config.get("cache.default_ttl", cache_ttl),
            backend=self.config.get("cache.backend", cache_backend),
            config=self.config.get("cache.config", cache_config or {}),
        )

        self.metrics = MetricsCollector(
            enabled=self.config.get("metrics.enabled", enable_metrics),
            collect_performance=self.config.get("metrics.collect_performance", True),
            collect_usage=self.config.get("metrics.collect_usage", True),
        )

        # Error aggregation
        if error_aggregation:
            self.error_aggregator = ErrorAggregator()
        else:
            self.error_aggregator = None

        # Circuit breaker for tool calls
        if circuit_breaker_config:
            from .errors import CircuitBreakerRetry

            self.circuit_breaker = CircuitBreakerRetry(**circuit_breaker_config)
        else:
            self.circuit_breaker = None

        # FastMCP server instance (initialized lazily)
        self._mcp = None
        self._running = False
        self._active_sessions: Dict[str, Dict[str, Any]] = {}
        self._connection_pools: Dict[str, List[Any]] = {}

        # Tool registry for management
        self._tool_registry: Dict[str, Dict[str, Any]] = {}
        self._resource_registry: Dict[str, Dict[str, Any]] = {}
        self._prompt_registry: Dict[str, Dict[str, Any]] = {}

    def _init_mcp(self):
        """Initialize FastMCP server."""
        if self._mcp is not None:
            return

        try:
            # Try independent FastMCP package first (when available)
            from fastmcp import FastMCP
            self._mcp = FastMCP(self.name)
            logger.info(f"Initialized FastMCP server: {self.name}")
        except ImportError as e1:
            logger.warning(f"Independent FastMCP not available: {e1}")
            try:
                # Fallback to official MCP FastMCP (when fixed)
                from mcp.server import FastMCP
                self._mcp = FastMCP(self.name)
                logger.info(f"Initialized official FastMCP server: {self.name}")
            except ImportError as e2:
                logger.warning(f"Official FastMCP not available: {e2}")
                # Final fallback: Create a minimal FastMCP-compatible wrapper
                logger.info(f"Using low-level MCP Server fallback for: {self.name}")
                self._mcp = self._create_fallback_server()

    def _create_fallback_server(self):
        """Create a fallback server when FastMCP is not available."""
        logger.info("Creating fallback server implementation")

        class FallbackMCPServer:
            """Minimal FastMCP-compatible server for when FastMCP is unavailable."""

            def __init__(self, name: str):
                self.name = name
                self._tools = {}
                self._resources = {}
                self._prompts = {}
                logger.info(f"Fallback MCP server '{name}' initialized")

            def tool(self, *args, **kwargs):
                """Tool decorator that stores tool registration."""
                def decorator(func):
                    tool_name = func.__name__
                    self._tools[tool_name] = func
                    logger.debug(f"Registered fallback tool: {tool_name}")
                    return func
                return decorator

            def resource(self, uri):
                """Resource decorator that stores resource registration."""
                def decorator(func):
                    self._resources[uri] = func
                    logger.debug(f"Registered fallback resource: {uri}")
                    return func
                return decorator

            def prompt(self, name):
                """Prompt decorator that stores prompt registration."""
                def decorator(func):
                    self._prompts[name] = func
                    logger.debug(f"Registered fallback prompt: {name}")
                    return func
                return decorator

            def run(self, **kwargs):
                """Placeholder run method."""
                logger.warning(f"Fallback server '{self.name}' run() called - FastMCP features limited")
                logger.info(f"Registered: {len(self._tools)} tools, {len(self._resources)} resources, {len(self._prompts)} prompts")
                # In a real implementation, we would set up low-level MCP protocol here
                raise NotImplementedError(
                    "Full MCP protocol not implemented in fallback mode. "
                    "Install 'fastmcp>=2.10.0' or wait for official MCP package fix."
                )

        return FallbackMCPServer(self.name)

    def tool(
        self,
        cache_key: Optional[str] = None,
        cache_ttl: Optional[int] = None,
        format_response: Optional[str] = None,
        # Enhanced features
        required_permission: Optional[str] = None,
        required_permissions: Optional[
            List[str]
        ] = None,  # Added for backward compatibility
        rate_limit: Optional[Dict[str, Any]] = None,
        enable_circuit_breaker: bool = True,
        timeout: Optional[float] = None,
        retryable: bool = True,
        stream_response: bool = False,
    ):
        """
        Enhanced tool decorator with authentication, caching, metrics, and error handling.

        Args:
            cache_key: Optional cache key for caching results
            cache_ttl: Optional TTL override for this tool
            format_response: Optional response format ("json", "markdown", "table", etc.)
            required_permission: Single required permission for tool access
            required_permissions: List of required permissions (alternative to required_permission)
            rate_limit: Tool-specific rate limiting configuration
            enable_circuit_breaker: Enable circuit breaker for this tool
            timeout: Tool execution timeout in seconds
            retryable: Whether tool failures are retryable
            stream_response: Enable streaming response for large results

        Returns:
            Decorated function with enhanced capabilities

        Example:
            @server.tool(
                cache_key="weather",
                cache_ttl=600,
                format_response="markdown",
                required_permission="weather.read",
                rate_limit={"requests_per_minute": 10},
                timeout=30.0
            )
            async def get_weather(city: str) -> dict:
                # Expensive API call - will be cached for 10 minutes
                return await fetch_weather_data(city)
        """

        def decorator(func: F) -> F:
            if self._mcp is None:
                self._init_mcp()

            # Get function name for registration
            tool_name = func.__name__

            # Normalize permissions - support both singular and plural
            normalized_permission = None
            if required_permissions is not None and required_permission is not None:
                raise ValueError(
                    "Cannot specify both required_permission and required_permissions"
                )
            elif required_permissions is not None:
                if len(required_permissions) == 1:
                    normalized_permission = required_permissions[0]
                elif len(required_permissions) > 1:
                    # For now, take the first permission. Future enhancement could support multiple.
                    normalized_permission = required_permissions[0]
                    logger.warning(
                        f"Tool {tool_name}: Multiple permissions specified, using first: {normalized_permission}"
                    )
            elif required_permission is not None:
                normalized_permission = required_permission

            # Create enhanced wrapper
            enhanced_func = self._create_enhanced_tool(
                func,
                tool_name,
                cache_key,
                cache_ttl,
                format_response,
                normalized_permission,
                rate_limit,
                enable_circuit_breaker,
                timeout,
                retryable,
                stream_response,
            )

            # Register with FastMCP
            mcp_tool = self._mcp.tool()(enhanced_func)

            # Track in registry with enhanced metadata
            self._tool_registry[tool_name] = {
                "function": mcp_tool,
                "original_function": func,
                "cached": cache_key is not None,
                "cache_key": cache_key,
                "cache_ttl": cache_ttl,
                "format_response": format_response,
                "required_permission": normalized_permission,
                "rate_limit": rate_limit,
                "enable_circuit_breaker": enable_circuit_breaker,
                "timeout": timeout,
                "retryable": retryable,
                "stream_response": stream_response,
                "call_count": 0,
                "error_count": 0,
                "last_called": None,
            }

            logger.debug(
                f"Registered enhanced tool: {tool_name} "
                f"(cached: {cache_key is not None}, "
                f"auth: {required_permission is not None}, "
                f"rate_limited: {rate_limit is not None})"
            )
            return mcp_tool

        return decorator

    def _create_enhanced_tool(
        self,
        func: F,
        tool_name: str,
        cache_key: Optional[str],
        cache_ttl: Optional[int],
        response_format: Optional[str],
        required_permission: Optional[str],
        rate_limit: Optional[Dict[str, Any]],
        enable_circuit_breaker: bool,
        timeout: Optional[float],
        retryable: bool,
        stream_response: bool,
    ) -> F:
        """Create enhanced tool function with authentication, caching, metrics, error handling, and more."""

        @functools.wraps(func)
        def sync_wrapper(*args, **kwargs):
            # Generate session ID for tracking
            session_id = str(uuid.uuid4())
            start_time = time.time() if self.metrics.enabled else None

            try:
                # Authentication check
                if self.auth_manager and required_permission:
                    # Extract credentials from kwargs or context
                    credentials = self._extract_credentials_from_context(kwargs)
                    try:
                        user_info = self.auth_manager.authenticate_and_authorize(
                            credentials, required_permission
                        )
                        # Add user info to session
                        self._active_sessions[session_id] = {
                            "user": user_info,
                            "tool": tool_name,
                            "start_time": start_time,
                            "permission": required_permission,
                        }
                    except (AuthenticationError, AuthorizationError) as e:
                        if self.error_aggregator:
                            self.error_aggregator.record_error(e)
                        raise ToolError(
                            f"Access denied for {tool_name}: {str(e)}",
                            tool_name=tool_name,
                        )

                # Rate limiting check
                if rate_limit and self.auth_manager:
                    user_id = (
                        self._active_sessions.get(session_id, {})
                        .get("user", {})
                        .get("id", "anonymous")
                    )
                    try:
                        self.auth_manager.rate_limiter.check_rate_limit(
                            user_id, tool_name, **rate_limit
                        )
                    except RateLimitError as e:
                        if self.error_aggregator:
                            self.error_aggregator.record_error(e)
                        raise

                # Circuit breaker check
                if enable_circuit_breaker and self.circuit_breaker:
                    if not self.circuit_breaker.should_retry(
                        MCPError("Circuit breaker check"), 1
                    ):
                        error = MCPError(
                            f"Circuit breaker open for {tool_name}",
                            error_code=MCPErrorCode.CIRCUIT_BREAKER_OPEN,
                            retryable=True,
                        )
                        if self.error_aggregator:
                            self.error_aggregator.record_error(error)
                        raise error

                # Try cache first if enabled
                if cache_key and self.cache.enabled:
                    cache = self.cache.get_cache(cache_key, ttl=cache_ttl)
                    cache_lookup_key = self.cache._create_cache_key(
                        tool_name, args, kwargs
                    )

                    # For sync functions with Redis, we need to handle async operations
                    if cache.is_redis:
                        # Try to run async cache operations in sync context
                        try:
                            # Check if we're already in an async context
                            try:
                                asyncio.get_running_loop()
                                # We're in an async context, but this is a sync function
                                # Fall back to memory cache behavior (no caching for now)
                                result = None
                            except RuntimeError:
                                # Not in async context, we can use asyncio.run
                                result = asyncio.run(cache.aget(cache_lookup_key))
                        except Exception as e:
                            logger.debug(f"Redis cache error in sync context: {e}")
                            result = None
                    else:
                        result = cache.get(cache_lookup_key)

                    if result is not None:
                        logger.debug(f"Cache hit for {tool_name}")
                        if self.metrics.enabled:
                            latency = time.time() - start_time
                            self.metrics.track_tool_call(tool_name, latency, True)

                        # Update registry stats
                        self._tool_registry[tool_name]["call_count"] += 1
                        self._tool_registry[tool_name]["last_called"] = time.time()

                        return self._format_response(
                            result, response_format, stream_response
                        )

                # Execute function with timeout
                if timeout:
                    import signal

                    def timeout_handler(signum, frame):
                        raise TimeoutError(
                            f"Tool {tool_name} timed out after {timeout}s"
                        )

                    old_handler = signal.signal(signal.SIGALRM, timeout_handler)
                    signal.alarm(int(timeout))

                    try:
                        result = func(*args, **kwargs)
                    finally:
                        signal.alarm(0)
                        signal.signal(signal.SIGALRM, old_handler)
                else:
                    result = func(*args, **kwargs)

                # Cache result if enabled
                if cache_key and self.cache.enabled:
                    # For sync functions with Redis, handle async operations
                    if cache.is_redis:
                        try:
                            # Check if we're already in an async context
                            try:
                                asyncio.get_running_loop()
                                # We're in an async context, but this is a sync function
                                # Fall back to memory cache behavior (no caching for now)
                                pass
                            except RuntimeError:
                                # Not in async context, we can use asyncio.run
                                asyncio.run(cache.aset(cache_lookup_key, result))
                        except Exception as e:
                            logger.debug(f"Redis cache set error in sync context: {e}")
                    else:
                        cache.set(cache_lookup_key, result)
                    logger.debug(f"Cached result for {tool_name}")

                # Track success metrics
                if self.metrics.enabled:
                    latency = time.time() - start_time
                    self.metrics.track_tool_call(tool_name, latency, True)

                # Update circuit breaker on success
                if enable_circuit_breaker and self.circuit_breaker:
                    self.circuit_breaker.on_success()

                # Update registry stats
                self._tool_registry[tool_name]["call_count"] += 1
                self._tool_registry[tool_name]["last_called"] = time.time()

                return self._format_response(result, response_format, stream_response)

            except Exception as e:
                # Convert to MCP error if needed
                if not isinstance(e, MCPError):
                    mcp_error = ToolError(
                        f"Tool execution failed: {str(e)}",
                        tool_name=tool_name,
                        retryable=retryable,
                        cause=e,
                    )
                else:
                    mcp_error = e

                # Record error
                if self.error_aggregator:
                    self.error_aggregator.record_error(mcp_error)

                # Update circuit breaker on failure
                if enable_circuit_breaker and self.circuit_breaker:
                    self.circuit_breaker.on_failure(mcp_error)

                # Track error metrics
                if self.metrics.enabled and start_time:
                    latency = time.time() - start_time
                    self.metrics.track_tool_call(
                        tool_name, latency, False, type(e).__name__
                    )

                # Update registry stats
                self._tool_registry[tool_name]["error_count"] += 1
                self._tool_registry[tool_name]["last_called"] = time.time()

                logger.error(f"Error in tool {tool_name}: {mcp_error}")
                raise mcp_error

            finally:
                # Clean up session
                if session_id in self._active_sessions:
                    del self._active_sessions[session_id]

        @functools.wraps(func)
        async def async_wrapper(*args, **kwargs):
            # Generate session ID for tracking
            session_id = str(uuid.uuid4())
            start_time = time.time() if self.metrics.enabled else None

            try:
                # Authentication check
                if self.auth_manager and required_permission:
                    # Extract credentials from kwargs or context
                    credentials = self._extract_credentials_from_context(kwargs)

                    # Allow bypassing auth for direct calls when no credentials provided
                    # This enables testing and development scenarios
                    if not credentials and not any(
                        k.startswith("mcp_") for k in kwargs.keys()
                    ):
                        logger.debug(
                            f"Tool {tool_name}: No credentials provided, allowing direct call (development/testing)"
                        )
                        user_info = None
                    else:
                        try:
                            user_info = self.auth_manager.authenticate_and_authorize(
                                credentials, required_permission
                            )
                            # Add user info to session
                            self._active_sessions[session_id] = {
                                "user": user_info,
                                "tool": tool_name,
                                "start_time": start_time,
                                "permission": required_permission,
                            }
                        except (AuthenticationError, AuthorizationError) as e:
                            if self.error_aggregator:
                                self.error_aggregator.record_error(e)
                            raise ToolError(
                                f"Access denied for {tool_name}: {str(e)}",
                                tool_name=tool_name,
                            )

                # Rate limiting check
                if rate_limit and self.auth_manager:
                    user_id = (
                        self._active_sessions.get(session_id, {})
                        .get("user", {})
                        .get("id", "anonymous")
                    )
                    try:
                        self.auth_manager.rate_limiter.check_rate_limit(
                            user_id, tool_name, **rate_limit
                        )
                    except RateLimitError as e:
                        if self.error_aggregator:
                            self.error_aggregator.record_error(e)
                        raise

                # Circuit breaker check
                if enable_circuit_breaker and self.circuit_breaker:
                    if not self.circuit_breaker.should_retry(
                        MCPError("Circuit breaker check"), 1
                    ):
                        error = MCPError(
                            f"Circuit breaker open for {tool_name}",
                            error_code=MCPErrorCode.CIRCUIT_BREAKER_OPEN,
                            retryable=True,
                        )
                        if self.error_aggregator:
                            self.error_aggregator.record_error(error)
                        raise error

                # Execute with caching and stampede prevention if enabled
                if cache_key and self.cache.enabled:
                    cache = self.cache.get_cache(cache_key, ttl=cache_ttl)
                    cache_lookup_key = self.cache._create_cache_key(
                        tool_name, args, kwargs
                    )

                    # Define the compute function for cache-or-compute
                    async def compute_result():
                        # Filter out auth credentials from kwargs before calling the function
                        clean_kwargs = {
                            k: v
                            for k, v in kwargs.items()
                            if k
                            not in [
                                "api_key",
                                "token",
                                "username",
                                "password",
                                "jwt",
                                "authorization",
                                "mcp_auth",
                            ]
                        }

                        # Execute function with timeout
                        if timeout:
                            return await asyncio.wait_for(
                                func(*args, **clean_kwargs), timeout=timeout
                            )
                        else:
                            return await func(*args, **clean_kwargs)

                    # Use cache-or-compute with stampede prevention
                    result = await cache.get_or_compute(
                        cache_lookup_key, compute_result, cache_ttl
                    )
                    logger.debug(f"Got result for {tool_name} (cached or computed)")
                else:
                    # No caching - execute directly
                    # Filter out auth credentials from kwargs before calling the function
                    clean_kwargs = {
                        k: v
                        for k, v in kwargs.items()
                        if k
                        not in [
                            "api_key",
                            "token",
                            "username",
                            "password",
                            "jwt",
                            "authorization",
                            "mcp_auth",
                        ]
                    }

                    # Execute function with timeout
                    if timeout:
                        result = await asyncio.wait_for(
                            func(*args, **clean_kwargs), timeout=timeout
                        )
                    else:
                        result = await func(*args, **clean_kwargs)

                # Track success metrics
                if self.metrics.enabled:
                    latency = time.time() - start_time
                    self.metrics.track_tool_call(tool_name, latency, True)

                # Update circuit breaker on success
                if enable_circuit_breaker and self.circuit_breaker:
                    self.circuit_breaker.on_success()

                # Update registry stats
                self._tool_registry[tool_name]["call_count"] += 1
                self._tool_registry[tool_name]["last_called"] = time.time()

                return self._format_response(result, response_format, stream_response)

            except Exception as e:
                # Convert to MCP error if needed
                if not isinstance(e, MCPError):
                    mcp_error = ToolError(
                        f"Tool execution failed: {str(e)}",
                        tool_name=tool_name,
                        retryable=retryable,
                        cause=e,
                    )
                else:
                    mcp_error = e

                # Record error
                if self.error_aggregator:
                    self.error_aggregator.record_error(mcp_error)

                # Update circuit breaker on failure
                if enable_circuit_breaker and self.circuit_breaker:
                    self.circuit_breaker.on_failure(mcp_error)

                # Track error metrics
                if self.metrics.enabled and start_time:
                    latency = time.time() - start_time
                    self.metrics.track_tool_call(
                        tool_name, latency, False, type(e).__name__
                    )

                # Update registry stats
                self._tool_registry[tool_name]["error_count"] += 1
                self._tool_registry[tool_name]["last_called"] = time.time()

                logger.error(f"Error in tool {tool_name}: {mcp_error}")
                raise mcp_error

            finally:
                # Clean up session
                if session_id in self._active_sessions:
                    del self._active_sessions[session_id]

        # Return appropriate wrapper based on function type
        if asyncio.iscoroutinefunction(func):
            return async_wrapper
        else:
            return sync_wrapper

def _format_response(
|
1098
|
+
self, result: Any, response_format: Optional[str], stream_response: bool = False
|
1099
|
+
) -> Any:
|
1100
|
+
"""Format response if formatting is enabled, with optional streaming support."""
|
1101
|
+
if not self.config.get("formatting.enabled", True) or not response_format:
|
1102
|
+
if (
|
1103
|
+
stream_response
|
1104
|
+
and isinstance(result, (list, dict))
|
1105
|
+
and len(str(result)) > 1000
|
1106
|
+
):
|
1107
|
+
# For large results, consider streaming (simplified implementation)
|
1108
|
+
return {
|
1109
|
+
"streaming": True,
|
1110
|
+
"data": result,
|
1111
|
+
"chunks": self._chunk_large_response(result),
|
1112
|
+
}
|
1113
|
+
return result
|
1114
|
+
|
1115
|
+
try:
|
1116
|
+
formatted = format_response(result, response_format)
|
1117
|
+
if stream_response and isinstance(formatted, str) and len(formatted) > 1000:
|
1118
|
+
return {
|
1119
|
+
"streaming": True,
|
1120
|
+
"data": formatted,
|
1121
|
+
"chunks": self._chunk_large_response(formatted),
|
1122
|
+
}
|
1123
|
+
return formatted
|
1124
|
+
except Exception as e:
|
1125
|
+
logger.warning(f"Failed to format response: {e}")
|
1126
|
+
return result
|
1127
|
+
|
1128
|
+
def _chunk_large_response(self, data: Any, chunk_size: int = 1000) -> List[str]:
|
1129
|
+
"""Chunk large responses for streaming."""
|
1130
|
+
if isinstance(data, str):
|
1131
|
+
return [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)]
|
1132
|
+
elif isinstance(data, (list, dict)):
|
1133
|
+
data_str = str(data)
|
1134
|
+
return [
|
1135
|
+
data_str[i : i + chunk_size]
|
1136
|
+
for i in range(0, len(data_str), chunk_size)
|
1137
|
+
]
|
1138
|
+
else:
|
1139
|
+
return [str(data)]
|
1140
|
+
|
1141
|
+
def _extract_credentials_from_context(
|
1142
|
+
self, kwargs: Dict[str, Any]
|
1143
|
+
) -> Dict[str, Any]:
|
1144
|
+
"""Extract credentials from function context or kwargs."""
|
1145
|
+
# Look for common credential patterns in kwargs
|
1146
|
+
credentials = {}
|
1147
|
+
|
1148
|
+
# Check for MCP-style authentication headers
|
1149
|
+
if "mcp_auth" in kwargs:
|
1150
|
+
credentials.update(kwargs["mcp_auth"])
|
1151
|
+
|
1152
|
+
# Check for common auth patterns
|
1153
|
+
auth_fields = ["api_key", "token", "username", "password", "jwt"]
|
1154
|
+
for field in auth_fields:
|
1155
|
+
if field in kwargs:
|
1156
|
+
credentials[field] = kwargs[field]
|
1157
|
+
|
1158
|
+
# Check for Authorization header pattern
|
1159
|
+
if "authorization" in kwargs:
|
1160
|
+
auth_header = kwargs["authorization"]
|
1161
|
+
if auth_header.startswith("Bearer "):
|
1162
|
+
credentials["token"] = auth_header[7:]
|
1163
|
+
elif auth_header.startswith("Basic "):
|
1164
|
+
import base64
|
1165
|
+
|
1166
|
+
try:
|
1167
|
+
decoded = base64.b64decode(auth_header[6:]).decode()
|
1168
|
+
if ":" in decoded:
|
1169
|
+
username, password = decoded.split(":", 1)
|
1170
|
+
credentials["username"] = username
|
1171
|
+
credentials["password"] = password
|
1172
|
+
except Exception:
|
1173
|
+
pass
|
1174
|
+
|
1175
|
+
return credentials
|
1176
|
+
|
1177
|
+
def resource(self, uri: str):
|
1178
|
+
"""
|
1179
|
+
Add resource with metrics tracking.
|
1180
|
+
|
1181
|
+
Args:
|
1182
|
+
uri: Resource URI pattern
|
1183
|
+
|
1184
|
+
Returns:
|
1185
|
+
Decorated function
|
1186
|
+
"""
|
1187
|
+
|
1188
|
+
def decorator(func: F) -> F:
|
1189
|
+
if self._mcp is None:
|
1190
|
+
self._init_mcp()
|
1191
|
+
|
1192
|
+
# Wrap with metrics if enabled
|
1193
|
+
if self.metrics.enabled:
|
1194
|
+
func = self.metrics.track_tool(f"resource:{uri}")(func)
|
1195
|
+
|
1196
|
+
return self._mcp.resource(uri)(func)
|
1197
|
+
|
1198
|
+
return decorator
|
1199
|
+
|
1200
|
+
def prompt(self, name: str):
|
1201
|
+
"""
|
1202
|
+
Add prompt with metrics tracking.
|
1203
|
+
|
1204
|
+
Args:
|
1205
|
+
name: Prompt name
|
1206
|
+
|
1207
|
+
Returns:
|
1208
|
+
Decorated function
|
1209
|
+
"""
|
1210
|
+
|
1211
|
+
def decorator(func: F) -> F:
|
1212
|
+
if self._mcp is None:
|
1213
|
+
self._init_mcp()
|
1214
|
+
|
1215
|
+
# Wrap with metrics if enabled
|
1216
|
+
if self.metrics.enabled:
|
1217
|
+
func = self.metrics.track_tool(f"prompt:{name}")(func)
|
1218
|
+
|
1219
|
+
return self._mcp.prompt(name)(func)
|
1220
|
+
|
1221
|
+
return decorator
|
1222
|
+
|
1223
|
+
def get_tool_stats(self) -> Dict[str, Any]:
|
1224
|
+
"""Get statistics for all registered tools."""
|
1225
|
+
stats = {
|
1226
|
+
"registered_tools": len(self._tool_registry),
|
1227
|
+
"cached_tools": sum(1 for t in self._tool_registry.values() if t["cached"]),
|
1228
|
+
"tools": {},
|
1229
|
+
}
|
1230
|
+
|
1231
|
+
for tool_name, tool_info in self._tool_registry.items():
|
1232
|
+
stats["tools"][tool_name] = {
|
1233
|
+
"cached": tool_info["cached"],
|
1234
|
+
"cache_key": tool_info.get("cache_key"),
|
1235
|
+
"format_response": tool_info.get("format_response"),
|
1236
|
+
}
|
1237
|
+
|
1238
|
+
return stats
|
1239
|
+
|
1240
|
+
def get_server_stats(self) -> Dict[str, Any]:
|
1241
|
+
"""Get comprehensive server statistics."""
|
1242
|
+
stats = {
|
1243
|
+
"server": {
|
1244
|
+
"name": self.name,
|
1245
|
+
"running": self._running,
|
1246
|
+
"config": self.config.to_dict(),
|
1247
|
+
"active_sessions": len(self._active_sessions),
|
1248
|
+
"transport": {
|
1249
|
+
"http_enabled": self.enable_http_transport,
|
1250
|
+
"sse_enabled": self.enable_sse_transport,
|
1251
|
+
"streaming_enabled": self.enable_streaming,
|
1252
|
+
"timeout": self.transport_timeout,
|
1253
|
+
"max_request_size": self.max_request_size,
|
1254
|
+
},
|
1255
|
+
"features": {
|
1256
|
+
"auth_enabled": self.auth_manager is not None,
|
1257
|
+
"circuit_breaker_enabled": self.circuit_breaker is not None,
|
1258
|
+
"error_aggregation_enabled": self.error_aggregator is not None,
|
1259
|
+
"discovery_enabled": self.enable_discovery,
|
1260
|
+
},
|
1261
|
+
},
|
1262
|
+
"tools": self.get_tool_stats(),
|
1263
|
+
"resources": self.get_resource_stats(),
|
1264
|
+
"prompts": self.get_prompt_stats(),
|
1265
|
+
}
|
1266
|
+
|
1267
|
+
if self.metrics.enabled:
|
1268
|
+
stats["metrics"] = self.metrics.export_metrics()
|
1269
|
+
|
1270
|
+
if self.cache.enabled:
|
1271
|
+
stats["cache"] = self.cache.stats()
|
1272
|
+
|
1273
|
+
if self.error_aggregator:
|
1274
|
+
stats["errors"] = self.error_aggregator.get_error_stats(
|
1275
|
+
time_window=3600
|
1276
|
+
) # Last hour
|
1277
|
+
|
1278
|
+
if self.circuit_breaker:
|
1279
|
+
stats["circuit_breaker"] = {
|
1280
|
+
"state": self.circuit_breaker.state,
|
1281
|
+
"failure_count": self.circuit_breaker.failure_count,
|
1282
|
+
"success_count": self.circuit_breaker.success_count,
|
1283
|
+
}
|
1284
|
+
|
1285
|
+
return stats
|
1286
|
+
|
1287
|
+
def get_resource_stats(self) -> Dict[str, Any]:
|
1288
|
+
"""Get resource statistics."""
|
1289
|
+
return {
|
1290
|
+
"registered_resources": len(self._resource_registry),
|
1291
|
+
"resources": {
|
1292
|
+
uri: {
|
1293
|
+
"call_count": info.get("call_count", 0),
|
1294
|
+
"error_count": info.get("error_count", 0),
|
1295
|
+
"last_accessed": info.get("last_accessed"),
|
1296
|
+
}
|
1297
|
+
for uri, info in self._resource_registry.items()
|
1298
|
+
},
|
1299
|
+
}
|
1300
|
+
|
1301
|
+
def get_prompt_stats(self) -> Dict[str, Any]:
|
1302
|
+
"""Get prompt statistics."""
|
1303
|
+
return {
|
1304
|
+
"registered_prompts": len(self._prompt_registry),
|
1305
|
+
"prompts": {
|
1306
|
+
name: {
|
1307
|
+
"call_count": info.get("call_count", 0),
|
1308
|
+
"error_count": info.get("error_count", 0),
|
1309
|
+
"last_used": info.get("last_used"),
|
1310
|
+
}
|
1311
|
+
for name, info in self._prompt_registry.items()
|
1312
|
+
},
|
1313
|
+
}
|
1314
|
+
|
1315
|
+
def get_active_sessions(self) -> Dict[str, Dict[str, Any]]:
|
1316
|
+
"""Get information about active sessions."""
|
1317
|
+
return {
|
1318
|
+
session_id: {
|
1319
|
+
"user": session_info.get("user", {}),
|
1320
|
+
"tool": session_info.get("tool"),
|
1321
|
+
"permission": session_info.get("permission"),
|
1322
|
+
"duration": time.time() - session_info.get("start_time", time.time()),
|
1323
|
+
}
|
1324
|
+
for session_id, session_info in self._active_sessions.items()
|
1325
|
+
}
|
1326
|
+
|
1327
|
+
def get_error_trends(
|
1328
|
+
self, time_window: Optional[float] = None
|
1329
|
+
) -> List[Dict[str, Any]]:
|
1330
|
+
"""Get error trends over time."""
|
1331
|
+
if not self.error_aggregator:
|
1332
|
+
return []
|
1333
|
+
return self.error_aggregator.get_error_trends()
|
1334
|
+
|
1335
|
+
    def health_check(self) -> Dict[str, Any]:
        """Perform comprehensive health check."""
        health_status = {
            "status": "healthy",
            "timestamp": time.time(),
            "server": {
                "name": self.name,
                "running": self._running,
                "uptime": time.time()
                - self.config.get("server.start_time", time.time()),
            },
            "components": {
                "mcp": self._mcp is not None,
                "cache": self.cache.enabled if self.cache else False,
                "metrics": self.metrics.enabled if self.metrics else False,
                "auth": self.auth_manager is not None,
                "circuit_breaker": self.circuit_breaker is not None,
            },
            "resources": {
                "active_sessions": len(self._active_sessions),
                "tools_registered": len(self._tool_registry),
                "resources_registered": len(self._resource_registry),
                "prompts_registered": len(self._prompt_registry),
            },
        }

        # Check for issues
        issues = []

        # Check error rates
        if self.error_aggregator:
            error_stats = self.error_aggregator.get_error_stats(
                time_window=300
            )  # Last 5 minutes
            if error_stats.get("error_rate", 0) > 10:  # More than 10 errors per second
                issues.append("High error rate detected")
                health_status["status"] = "degraded"

        # Check circuit breaker state
        if self.circuit_breaker and self.circuit_breaker.state == "open":
            issues.append("Circuit breaker is open")
            health_status["status"] = "degraded"

        # Check memory usage for caches
        if self.cache and self.cache.enabled:
            cache_stats = self.cache.stats()
            # Simple heuristic - if any cache is over 90% full
            for cache_name, stats in cache_stats.items():
                if isinstance(stats, dict) and stats.get("utilization", 0) > 0.9:
                    issues.append(f"Cache {cache_name} is over 90% full")
                    health_status["status"] = "degraded"

        health_status["issues"] = issues

        if issues and health_status["status"] == "healthy":
            health_status["status"] = "degraded"

        return health_status

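A hedged sketch of how a deployment might poll health_check() and react to a degraded status; `server` and `notify_operator` are illustrative names supplied by the caller, not part of the package:

import time

def watch_health(server, notify_operator, interval: float = 30.0):
    """Poll health_check() and forward any degradation to an alerting hook."""
    while True:
        report = server.health_check()
        if report["status"] != "healthy":
            # `issues` lists the specific checks that failed: high error rate,
            # an open circuit breaker, or a cache over 90% utilization.
            notify_operator(report["status"], report["issues"])
        time.sleep(interval)
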
    def clear_cache(self, cache_name: Optional[str] = None) -> None:
        """Clear cache(s)."""
        if cache_name:
            cache = self.cache.get_cache(cache_name)
            cache.clear()
            logger.info(f"Cleared cache: {cache_name}")
        else:
            self.cache.clear_all()
            logger.info("Cleared all caches")

    def reset_circuit_breaker(self) -> None:
        """Reset circuit breaker to closed state."""
        if self.circuit_breaker:
            self.circuit_breaker.state = "closed"
            self.circuit_breaker.failure_count = 0
            self.circuit_breaker.success_count = 0
            logger.info("Circuit breaker reset to closed state")

    def terminate_session(self, session_id: str) -> bool:
        """Terminate an active session."""
        if session_id in self._active_sessions:
            del self._active_sessions[session_id]
            logger.info(f"Terminated session: {session_id}")
            return True
        return False

    def get_tool_by_name(self, tool_name: str) -> Optional[Dict[str, Any]]:
        """Get tool information by name."""
        return self._tool_registry.get(tool_name)

    def disable_tool(self, tool_name: str) -> bool:
        """Temporarily disable a tool."""
        if tool_name in self._tool_registry:
            self._tool_registry[tool_name]["disabled"] = True
            logger.info(f"Disabled tool: {tool_name}")
            return True
        return False

    def enable_tool(self, tool_name: str) -> bool:
        """Re-enable a disabled tool."""
        if tool_name in self._tool_registry:
            self._tool_registry[tool_name]["disabled"] = False
            logger.info(f"Enabled tool: {tool_name}")
            return True
        return False

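A brief, assumed usage sketch of the administrative helpers above; the tool name "weather_lookup" is hypothetical and only stands in for a registered tool:

# Take a misbehaving tool out of rotation, flush cached results, and reset
# the circuit breaker once its upstream dependency has recovered.
if server.disable_tool("weather_lookup"):
    server.clear_cache()              # or clear_cache("<cache name>") for a single cache
    server.reset_circuit_breaker()
    server.enable_tool("weather_lookup")
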
    def run(self):
        """Run the enhanced MCP server with all features."""
        if self._mcp is None:
            self._init_mcp()

        # Record server start time
        self.config.update({"server.start_time": time.time()})

        # Log enhanced server startup
        logger.info(f"Starting enhanced MCP server: {self.name}")
        logger.info("Features enabled:")
        logger.info(f" - Cache: {self.cache.enabled if self.cache else False}")
        logger.info(f" - Metrics: {self.metrics.enabled if self.metrics else False}")
        logger.info(f" - Authentication: {self.auth_manager is not None}")
        logger.info(f" - HTTP Transport: {self.enable_http_transport}")
        logger.info(f" - SSE Transport: {self.enable_sse_transport}")
        logger.info(f" - Streaming: {self.enable_streaming}")
        logger.info(f" - Circuit Breaker: {self.circuit_breaker is not None}")
        logger.info(f" - Error Aggregation: {self.error_aggregator is not None}")
        logger.info(f" - Service Discovery: {self.enable_discovery}")

        logger.info("Server configuration:")
        logger.info(f" - Tools registered: {len(self._tool_registry)}")
        logger.info(f" - Resources registered: {len(self._resource_registry)}")
        logger.info(f" - Prompts registered: {len(self._prompt_registry)}")
        logger.info(f" - Transport timeout: {self.transport_timeout}s")
        logger.info(f" - Max request size: {self.max_request_size} bytes")

        self._running = True

        try:
            # Perform health check before starting
            health = self.health_check()
            if health["status"] != "healthy":
                logger.warning(f"Server health check shows issues: {health['issues']}")

            # Run the FastMCP server
            logger.info("Starting FastMCP server...")
            self._mcp.run()

        except KeyboardInterrupt:
            logger.info("Server stopped by user")
        except Exception as e:
            logger.error(f"Server error: {e}")

            # Record error if aggregator is enabled
            if self.error_aggregator:
                error = MCPError(
                    f"Server startup/runtime error: {str(e)}",
                    error_code=MCPErrorCode.SERVER_UNAVAILABLE,
                    cause=e,
                )
                self.error_aggregator.record_error(error)

            raise
        finally:
            logger.info("Shutting down enhanced MCP server...")

            # Clean up active sessions
            if self._active_sessions:
                logger.info(f"Terminating {len(self._active_sessions)} active sessions")
                self._active_sessions.clear()

            # Log final stats
            if self.metrics and self.metrics.enabled:
                final_stats = self.get_server_stats()
                logger.info(
                    f"Final server statistics: {final_stats.get('metrics', {})}"
                )

            self._running = False
            logger.info(f"Enhanced MCP server '{self.name}' stopped")

    async def run_stdio(self):
        """Run the server using stdio transport for testing."""
        if self._mcp is None:
            self._init_mcp()

        # For testing, we'll implement a simple stdio server
        import json
        import sys

        logger.info(f"Starting MCP server '{self.name}' in stdio mode")
        self._running = True

        try:
            while self._running:
                # Read JSON-RPC request from stdin
                line = sys.stdin.readline()
                if not line:
                    break

                try:
                    request = json.loads(line.strip())

                    # Handle different request types
                    if request.get("method") == "tools/list":
                        # Return list of tools
                        tools = []
                        for name, info in self._tool_registry.items():
                            if not info.get("disabled", False):
                                tools.append(
                                    {
                                        "name": name,
                                        "description": info.get("description", ""),
                                        "inputSchema": info.get("input_schema", {}),
                                    }
                                )

                        response = {"id": request.get("id"), "result": {"tools": tools}}

                    elif request.get("method") == "tools/call":
                        # Call a tool
                        params = request.get("params", {})
                        tool_name = params.get("name")
                        arguments = params.get("arguments", {})

                        if tool_name in self._tool_registry:
                            handler = self._tool_registry[tool_name]["handler"]
                            try:
                                # Execute tool
                                if asyncio.iscoroutinefunction(handler):
                                    result = await handler(**arguments)
                                else:
                                    result = handler(**arguments)

                                response = {
                                    "id": request.get("id"),
                                    "result": {
                                        "content": [
                                            {"type": "text", "text": str(result)}
                                        ]
                                    },
                                }
                            except Exception as e:
                                response = {
                                    "id": request.get("id"),
                                    "error": {"code": -32603, "message": str(e)},
                                }
                        else:
                            response = {
                                "id": request.get("id"),
                                "error": {
                                    "code": -32601,
                                    "message": f"Tool not found: {tool_name}",
                                },
                            }

                    else:
                        # Unknown method
                        response = {
                            "id": request.get("id"),
                            "error": {
                                "code": -32601,
                                "message": f"Method not found: {request.get('method')}",
                            },
                        }

                    # Write response to stdout
                    sys.stdout.write(json.dumps(response) + "\n")
                    sys.stdout.flush()

                except json.JSONDecodeError:
                    # Invalid JSON
                    error_response = {
                        "id": None,
                        "error": {"code": -32700, "message": "Parse error"},
                    }
                    sys.stdout.write(json.dumps(error_response) + "\n")
                    sys.stdout.flush()

        except KeyboardInterrupt:
            logger.info("Server stopped by user")
        except Exception as e:
            logger.error(f"Server error: {e}")
            raise
        finally:
            self._running = False

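To make the line-delimited JSON-RPC framing handled by run_stdio() concrete, here is a hedged example of composing a request and the shape of the reply it produces; the tool name "hello" is hypothetical and must match a tool actually registered on the server:

import json

# One JSON-RPC message per line on the server's stdin.
request = {
    "id": 1,
    "method": "tools/call",
    "params": {"name": "hello", "arguments": {"name": "world"}},
}
print(json.dumps(request))

# On success the server writes back one line shaped like:
# {"id": 1, "result": {"content": [{"type": "text", "text": "Hello, world!"}]}}
# Unknown tools or methods yield an "error" object with code -32601,
# and unparseable input yields code -32700 ("Parse error").
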
class SimpleMCPServer(MCPServerBase):
    """Simple MCP Server for prototyping and development.

    This is a lightweight version of MCPServer without authentication,
    metrics, caching, or other production features. Perfect for:
    - Quick prototyping
    - Development and testing
    - Simple use cases without advanced features

    Example:
        >>> server = SimpleMCPServer("my-prototype")
        >>> @server.tool()
        ... def hello(name: str) -> str:
        ...     return f"Hello, {name}!"
        >>> server.run()
    """

    def __init__(self, name: str, description: str = None):
        """Initialize simple MCP server.

        Args:
            name: Server name
            description: Server description
        """
        super().__init__(name, description)

        # Disable all advanced features for simplicity
        self.enable_cache = False
        self.enable_metrics = False
        self.enable_http_transport = False
        self.rate_limit_config = None
        self.circuit_breaker_config = None
        self.auth_provider = None

        # Simple in-memory storage
        self._simple_tools = {}
        self._simple_resources = {}
        self._simple_prompts = {}

        logger.info(f"SimpleMCPServer '{name}' initialized for prototyping")

    def setup(self):
        """Setup method - no additional setup needed for SimpleMCPServer."""
        pass

    def tool(self, description: str = None):
        """Register a simple tool (no auth, caching, or metrics).

        Args:
            description: Tool description

        Returns:
            Decorator function
        """

        def decorator(func):
            # Initialize MCP if needed
            if self._mcp is None:
                self._init_mcp()

            tool_name = func.__name__
            self._simple_tools[tool_name] = {
                "function": func,
                "description": description or f"Tool: {tool_name}",
                "created_at": time.time(),
            }

            # Register with FastMCP
            self._mcp.tool(description or f"Tool: {tool_name}")(func)

            logger.debug(f"SimpleMCPServer: Registered tool '{tool_name}'")
            return func

        return decorator

    def resource(self, uri: str, description: str = None):
        """Register a simple resource.

        Args:
            uri: Resource URI
            description: Resource description

        Returns:
            Decorator function
        """

        def decorator(func):
            # Initialize MCP if needed
            if self._mcp is None:
                self._init_mcp()

            self._simple_resources[uri] = {
                "function": func,
                "description": description or f"Resource: {uri}",
                "created_at": time.time(),
            }

            # Register with FastMCP
            self._mcp.resource(uri, description or f"Resource: {uri}")(func)

            logger.debug(f"SimpleMCPServer: Registered resource '{uri}'")
            return func

        return decorator

    def get_stats(self) -> dict:
        """Get simple server statistics.

        Returns:
            Dictionary with basic stats
        """
        return {
            "server_name": self.name,
            "server_type": "SimpleMCPServer",
            "tools_count": len(self._simple_tools),
            "resources_count": len(self._simple_resources),
            "prompts_count": len(self._simple_prompts),
            "features": {
                "authentication": False,
                "caching": False,
                "metrics": False,
                "rate_limiting": False,
                "circuit_breaker": False,
            },
        }


# Note: EnhancedMCPServer alias removed - use MCPServer directly
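A hedged usage sketch extending the class's own docstring example with a resource registration and a stats call; the URI and return values are illustrative, and the import path simply points at where this diff defines SimpleMCPServer (kailash/mcp_server/server.py):

from kailash.mcp_server.server import SimpleMCPServer

server = SimpleMCPServer("my-prototype")

@server.tool("Echo a greeting")
def hello(name: str) -> str:
    return f"Hello, {name}!"

@server.resource("config://app", "Static app configuration")  # illustrative URI
def app_config() -> dict:
    return {"version": "0.6.4", "debug": True}

print(server.get_stats())  # tools_count == 1, resources_count == 1
server.run()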