kailash 0.6.3__py3-none-any.whl → 0.6.4__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (120)
  1. kailash/__init__.py +3 -3
  2. kailash/api/custom_nodes_secure.py +3 -3
  3. kailash/api/gateway.py +1 -1
  4. kailash/api/studio.py +2 -3
  5. kailash/api/workflow_api.py +3 -4
  6. kailash/core/resilience/bulkhead.py +460 -0
  7. kailash/core/resilience/circuit_breaker.py +92 -10
  8. kailash/edge/discovery.py +86 -0
  9. kailash/mcp_server/__init__.py +309 -33
  10. kailash/mcp_server/advanced_features.py +1022 -0
  11. kailash/mcp_server/ai_registry_server.py +27 -2
  12. kailash/mcp_server/auth.py +789 -0
  13. kailash/mcp_server/client.py +645 -378
  14. kailash/mcp_server/discovery.py +1593 -0
  15. kailash/mcp_server/errors.py +673 -0
  16. kailash/mcp_server/oauth.py +1727 -0
  17. kailash/mcp_server/protocol.py +1126 -0
  18. kailash/mcp_server/registry_integration.py +587 -0
  19. kailash/mcp_server/server.py +1213 -98
  20. kailash/mcp_server/transports.py +1169 -0
  21. kailash/mcp_server/utils/__init__.py +6 -1
  22. kailash/mcp_server/utils/cache.py +250 -7
  23. kailash/middleware/auth/auth_manager.py +3 -3
  24. kailash/middleware/communication/api_gateway.py +2 -9
  25. kailash/middleware/communication/realtime.py +1 -1
  26. kailash/middleware/mcp/enhanced_server.py +1 -1
  27. kailash/nodes/__init__.py +2 -0
  28. kailash/nodes/admin/audit_log.py +6 -6
  29. kailash/nodes/admin/permission_check.py +8 -8
  30. kailash/nodes/admin/role_management.py +32 -28
  31. kailash/nodes/admin/schema.sql +6 -1
  32. kailash/nodes/admin/schema_manager.py +13 -13
  33. kailash/nodes/admin/security_event.py +16 -20
  34. kailash/nodes/admin/tenant_isolation.py +3 -3
  35. kailash/nodes/admin/transaction_utils.py +3 -3
  36. kailash/nodes/admin/user_management.py +21 -22
  37. kailash/nodes/ai/a2a.py +11 -11
  38. kailash/nodes/ai/ai_providers.py +9 -12
  39. kailash/nodes/ai/embedding_generator.py +13 -14
  40. kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
  41. kailash/nodes/ai/iterative_llm_agent.py +2 -2
  42. kailash/nodes/ai/llm_agent.py +210 -33
  43. kailash/nodes/ai/self_organizing.py +2 -2
  44. kailash/nodes/alerts/discord.py +4 -4
  45. kailash/nodes/api/graphql.py +6 -6
  46. kailash/nodes/api/http.py +12 -17
  47. kailash/nodes/api/rate_limiting.py +4 -4
  48. kailash/nodes/api/rest.py +15 -15
  49. kailash/nodes/auth/mfa.py +3 -4
  50. kailash/nodes/auth/risk_assessment.py +2 -2
  51. kailash/nodes/auth/session_management.py +5 -5
  52. kailash/nodes/auth/sso.py +143 -0
  53. kailash/nodes/base.py +6 -2
  54. kailash/nodes/base_async.py +16 -2
  55. kailash/nodes/base_with_acl.py +2 -2
  56. kailash/nodes/cache/__init__.py +9 -0
  57. kailash/nodes/cache/cache.py +1172 -0
  58. kailash/nodes/cache/cache_invalidation.py +870 -0
  59. kailash/nodes/cache/redis_pool_manager.py +595 -0
  60. kailash/nodes/code/async_python.py +2 -1
  61. kailash/nodes/code/python.py +196 -35
  62. kailash/nodes/compliance/data_retention.py +6 -6
  63. kailash/nodes/compliance/gdpr.py +5 -5
  64. kailash/nodes/data/__init__.py +10 -0
  65. kailash/nodes/data/optimistic_locking.py +906 -0
  66. kailash/nodes/data/readers.py +8 -8
  67. kailash/nodes/data/redis.py +349 -0
  68. kailash/nodes/data/sql.py +314 -3
  69. kailash/nodes/data/streaming.py +21 -0
  70. kailash/nodes/enterprise/__init__.py +8 -0
  71. kailash/nodes/enterprise/audit_logger.py +285 -0
  72. kailash/nodes/enterprise/batch_processor.py +22 -3
  73. kailash/nodes/enterprise/data_lineage.py +1 -1
  74. kailash/nodes/enterprise/mcp_executor.py +205 -0
  75. kailash/nodes/enterprise/service_discovery.py +150 -0
  76. kailash/nodes/enterprise/tenant_assignment.py +108 -0
  77. kailash/nodes/logic/async_operations.py +2 -2
  78. kailash/nodes/logic/convergence.py +1 -1
  79. kailash/nodes/logic/operations.py +1 -1
  80. kailash/nodes/monitoring/__init__.py +11 -1
  81. kailash/nodes/monitoring/health_check.py +456 -0
  82. kailash/nodes/monitoring/log_processor.py +817 -0
  83. kailash/nodes/monitoring/metrics_collector.py +627 -0
  84. kailash/nodes/monitoring/performance_benchmark.py +137 -11
  85. kailash/nodes/rag/advanced.py +7 -7
  86. kailash/nodes/rag/agentic.py +49 -2
  87. kailash/nodes/rag/conversational.py +3 -3
  88. kailash/nodes/rag/evaluation.py +3 -3
  89. kailash/nodes/rag/federated.py +3 -3
  90. kailash/nodes/rag/graph.py +3 -3
  91. kailash/nodes/rag/multimodal.py +3 -3
  92. kailash/nodes/rag/optimized.py +5 -5
  93. kailash/nodes/rag/privacy.py +3 -3
  94. kailash/nodes/rag/query_processing.py +6 -6
  95. kailash/nodes/rag/realtime.py +1 -1
  96. kailash/nodes/rag/registry.py +2 -6
  97. kailash/nodes/rag/router.py +1 -1
  98. kailash/nodes/rag/similarity.py +7 -7
  99. kailash/nodes/rag/strategies.py +4 -4
  100. kailash/nodes/security/abac_evaluator.py +6 -6
  101. kailash/nodes/security/behavior_analysis.py +5 -6
  102. kailash/nodes/security/credential_manager.py +1 -1
  103. kailash/nodes/security/rotating_credentials.py +11 -11
  104. kailash/nodes/security/threat_detection.py +8 -8
  105. kailash/nodes/testing/credential_testing.py +2 -2
  106. kailash/nodes/transform/processors.py +5 -5
  107. kailash/runtime/local.py +162 -14
  108. kailash/runtime/parameter_injection.py +425 -0
  109. kailash/runtime/parameter_injector.py +657 -0
  110. kailash/runtime/testing.py +2 -2
  111. kailash/testing/fixtures.py +2 -2
  112. kailash/workflow/builder.py +99 -18
  113. kailash/workflow/builder_improvements.py +207 -0
  114. kailash/workflow/input_handling.py +170 -0
  115. {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/METADATA +22 -9
  116. {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/RECORD +120 -94
  117. {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/WHEEL +0 -0
  118. {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/entry_points.txt +0 -0
  119. {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/licenses/LICENSE +0 -0
  120. {kailash-0.6.3.dist-info → kailash-0.6.4.dist-info}/top_level.txt +0 -0
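
The largest set of changes in this release lands in the kailash.mcp_server package; the diff body below covers the MCP server module itself. Pieced together from the new docstrings in that diff, a production-oriented server with authentication, metrics, HTTP transport, and rate limiting would be configured roughly as follows (a minimal sketch; the API key and rate-limit values are placeholders):

    from kailash.mcp_server import MCPServer
    from kailash.mcp_server.auth import APIKeyAuth

    # Placeholder credentials; supply real keys in practice.
    auth = APIKeyAuth({"user1": "secret-key"})

    server = MCPServer(
        "my-server",
        auth_provider=auth,
        enable_metrics=True,
        enable_http_transport=True,
        rate_limit_config={"requests_per_minute": 100},
    )

    # Tools can opt into caching and per-tool limits via the decorator.
    @server.tool(cache_key="search", cache_ttl=600)
    def search(query: str) -> dict:
        return {"results": f"Found: {query}"}

    server.run()
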
@@ -1,10 +1,21 @@
1
1
  """
2
- MCP Server Framework with production-ready capabilities.
2
+ Enhanced MCP Server Framework with production-ready capabilities.
3
3
 
4
4
  This module provides both basic and enhanced MCP server implementations using
5
5
  the official FastMCP framework from Anthropic. Servers run as long-lived
6
6
  services that expose tools, resources, and prompts to MCP clients.
7
7
 
8
+ Enhanced Features:
9
+ - Multiple transport support (STDIO, SSE, HTTP)
10
+ - Authentication and authorization
11
+ - Rate limiting and circuit breaker patterns
12
+ - Metrics collection and monitoring
13
+ - Error handling with structured codes
14
+ - Service discovery integration
15
+ - Resource streaming
16
+ - Connection pooling
17
+ - Caching with TTL support
18
+
8
19
  Basic Usage:
9
20
  Abstract base class for custom servers:
10
21
 
@@ -25,16 +36,44 @@ Production Usage:
25
36
  ... def search(query: str) -> dict:
26
37
  ... return {"results": f"Found data for {query}"}
27
38
  >>> server.run()
39
+
40
+ Enhanced Production Usage:
41
+ Server with authentication and monitoring:
42
+
43
+ >>> from kailash.mcp_server.auth import APIKeyAuth
44
+ >>> auth = APIKeyAuth({"user1": "secret-key"})
45
+ >>> server = MCPServer(
46
+ ... "my-server",
47
+ ... auth_provider=auth,
48
+ ... enable_metrics=True,
49
+ ... enable_http_transport=True,
50
+ ... rate_limit_config={"requests_per_minute": 100}
51
+ ... )
52
+ >>> server.run()
28
53
  """
29
54
 
30
55
  import asyncio
31
56
  import functools
32
57
  import logging
58
+ import time
59
+ import uuid
33
60
  from abc import ABC, abstractmethod
34
61
  from collections.abc import Callable
35
62
  from pathlib import Path
36
- from typing import Any, Dict, Optional, TypeVar, Union
37
-
63
+ from typing import Any, Dict, List, Optional, TypeVar, Union
64
+
65
+ from .auth import AuthManager, AuthProvider, PermissionManager, RateLimiter
66
+ from .errors import (
67
+ AuthenticationError,
68
+ AuthorizationError,
69
+ ErrorAggregator,
70
+ MCPError,
71
+ MCPErrorCode,
72
+ RateLimitError,
73
+ ResourceError,
74
+ RetryableOperation,
75
+ ToolError,
76
+ )
38
77
  from .utils import CacheManager, ConfigManager, MetricsCollector, format_response
39
78
 
40
79
  logger = logging.getLogger(__name__)
@@ -162,14 +201,45 @@ class MCPServerBase(ABC):
162
201
  def _init_mcp(self):
163
202
  """Initialize the FastMCP instance."""
164
203
  try:
165
- from mcp.server import FastMCP
166
-
204
+ # Try independent FastMCP package first (when available)
205
+ from fastmcp import FastMCP
167
206
  self._mcp = FastMCP(self.name)
168
207
  except ImportError:
169
- logger.error(
170
- "FastMCP not available. Install with: pip install 'mcp[server]'"
171
- )
172
- raise
208
+ logger.warning("FastMCP not available, using fallback mode")
209
+ # Use same fallback as MCPServer
210
+ self._mcp = self._create_fallback_server()
211
+
212
+ def _create_fallback_server(self):
213
+ """Create a fallback server when FastMCP is not available."""
214
+ class FallbackMCPServer:
215
+ def __init__(self, name: str):
216
+ self.name = name
217
+ self._tools = {}
218
+ self._resources = {}
219
+ self._prompts = {}
220
+
221
+ def tool(self, *args, **kwargs):
222
+ def decorator(func):
223
+ self._tools[func.__name__] = func
224
+ return func
225
+ return decorator
226
+
227
+ def resource(self, uri):
228
+ def decorator(func):
229
+ self._resources[uri] = func
230
+ return func
231
+ return decorator
232
+
233
+ def prompt(self, name):
234
+ def decorator(func):
235
+ self._prompts[name] = func
236
+ return func
237
+ return decorator
238
+
239
+ def run(self, **kwargs):
240
+ raise NotImplementedError("FastMCP not available")
241
+
242
+ return FallbackMCPServer(self.name)
173
243
 
174
244
  def start(self):
175
245
  """Start the MCP server.
@@ -206,35 +276,64 @@ class MCPServerBase(ABC):
206
276
  # In a real implementation, we'd need to handle graceful shutdown
207
277
 
208
278
 
209
- class EnhancedMCPServer:
279
+ class MCPServer:
210
280
  """
211
- Production-ready MCP server (available as SimpleMCPServer).
281
+ Kailash MCP Server - Node-based Model Context Protocol server.
212
282
 
213
- This is the main concrete MCP server implementation with all production
214
- features available. Features can be enabled/disabled as needed.
283
+ This MCP server follows Kailash philosophy by integrating with the node
284
+ and workflow system. Tools can be implemented as nodes, and complex
285
+ MCP capabilities can be built using workflows.
215
286
 
216
- Features available:
217
- - Caching with TTL support (enable_cache=True)
218
- - Metrics collection and monitoring (enable_metrics=True)
219
- - Response formatting utilities (enable_formatting=True)
220
- - Hierarchical configuration management
221
- - Error handling and logging
287
+ Core Features:
288
+ - Node-based tool implementation using Kailash nodes
289
+ - Workflow-based complex operations
290
+ - Production-ready with authentication, caching, and monitoring
291
+ - Multiple transport support (STDIO, SSE, HTTP)
292
+ - Integration with Kailash runtime and infrastructure
222
293
 
223
- Examples:
224
- Basic usage (recommended):
294
+ Kailash Philosophy Integration:
295
+ Using nodes as MCP tools:
225
296
  >>> from kailash.mcp_server import MCPServer
297
+ >>> from kailash.nodes import PythonCodeNode
298
+ >>>
226
299
  >>> server = MCPServer("my-server")
227
- >>> @server.tool()
228
- ... def search(query: str) -> dict:
229
- ... return {"results": f"Found: {query}"}
300
+ >>>
301
+ >>> # Register a node as an MCP tool
302
+ >>> @server.node_tool(PythonCodeNode)
303
+ ... def calculate(a: int, b: int) -> int:
304
+ ... return a + b
305
+ >>>
230
306
  >>> server.run()
231
307
 
232
- With production features enabled:
308
+ Using workflows as MCP tools:
309
+ >>> from kailash.workflows import WorkflowBuilder
310
+ >>>
311
+ >>> # Create workflow for complex MCP operation
312
+ >>> workflow = WorkflowBuilder()
313
+ >>> workflow.add_node("csv_reader", "CSVReaderNode", {"file_path": "data.csv"})
314
+ >>> workflow.add_node("processor", "PythonCodeNode", {"code": "process_data"})
315
+ >>> workflow.add_connection("csv_reader", "processor", "data", "input_data")
316
+ >>>
317
+ >>> server.register_workflow_tool("process_csv", workflow)
318
+
319
+ Traditional usage (for compatibility):
233
320
  >>> server = MCPServer("my-server", enable_cache=True, enable_metrics=True)
234
321
  >>> @server.tool(cache_key="search", cache_ttl=600)
235
322
  ... def search(query: str) -> dict:
236
323
  ... return {"results": f"Found: {query}"}
237
324
  >>> server.run()
325
+
326
+ With authentication and advanced features:
327
+ >>> from kailash.mcp_server.auth import APIKeyAuth
328
+ >>> auth = APIKeyAuth({"user1": "secret-key"})
329
+ >>> server = MCPServer(
330
+ ... "my-server",
331
+ ... auth_provider=auth,
332
+ ... enable_http_transport=True,
333
+ ... rate_limit_config={"requests_per_minute": 100},
334
+ ... circuit_breaker_config={"failure_threshold": 5}
335
+ ... )
336
+ >>> server.run()
238
337
  """
239
338
 
240
339
  def __init__(
@@ -243,8 +342,23 @@ class EnhancedMCPServer:
243
342
  config_file: Optional[Union[str, Path]] = None,
244
343
  enable_cache: bool = True,
245
344
  cache_ttl: int = 300,
345
+ cache_backend: str = "memory", # "memory" or "redis"
346
+ cache_config: Optional[Dict[str, Any]] = None,
246
347
  enable_metrics: bool = True,
247
348
  enable_formatting: bool = True,
349
+ enable_monitoring: bool = False, # Health checks, alerts, observability
350
+ # Enhanced features (optional for backward compatibility)
351
+ auth_provider: Optional[AuthProvider] = None,
352
+ enable_http_transport: bool = False,
353
+ enable_sse_transport: bool = False,
354
+ rate_limit_config: Optional[Dict[str, Any]] = None,
355
+ circuit_breaker_config: Optional[Dict[str, Any]] = None,
356
+ enable_discovery: bool = False,
357
+ connection_pool_config: Optional[Dict[str, Any]] = None,
358
+ error_aggregation: bool = True,
359
+ transport_timeout: float = 30.0,
360
+ max_request_size: int = 10_000_000, # 10MB
361
+ enable_streaming: bool = False,
248
362
  ):
249
363
  """
250
364
  Initialize enhanced MCP server.
@@ -254,22 +368,56 @@ class EnhancedMCPServer:
254
368
  config_file: Optional configuration file path
255
369
  enable_cache: Whether to enable caching (default: True)
256
370
  cache_ttl: Default cache TTL in seconds (default: 300)
371
+ cache_backend: Cache backend ("memory" or "redis")
372
+ cache_config: Cache configuration (for Redis: {"redis_url": "redis://...", "prefix": "mcp:"})
257
373
  enable_metrics: Whether to enable metrics collection (default: True)
258
374
  enable_formatting: Whether to enable response formatting (default: True)
375
+ auth_provider: Optional authentication provider
376
+ enable_http_transport: Enable HTTP transport support
377
+ enable_sse_transport: Enable SSE transport support
378
+ rate_limit_config: Rate limiting configuration
379
+ circuit_breaker_config: Circuit breaker configuration
380
+ enable_discovery: Enable service discovery
381
+ connection_pool_config: Connection pooling configuration
382
+ error_aggregation: Enable error aggregation
383
+ transport_timeout: Transport timeout in seconds
384
+ max_request_size: Maximum request size in bytes
385
+ enable_streaming: Enable streaming support
259
386
  """
260
387
  self.name = name
261
388
 
389
+ # Enhanced features
390
+ self.auth_provider = auth_provider
391
+ self.enable_http_transport = enable_http_transport
392
+ self.enable_sse_transport = enable_sse_transport
393
+ self.enable_discovery = enable_discovery
394
+ self.enable_streaming = enable_streaming
395
+ self.enable_monitoring = enable_monitoring
396
+ self.transport_timeout = transport_timeout
397
+ self.max_request_size = max_request_size
398
+
262
399
  # Initialize configuration
263
400
  self.config = ConfigManager(config_file)
264
401
 
265
- # Set default configuration values
402
+ # Set default configuration values including enhanced features
266
403
  self.config.update(
267
404
  {
268
- "server": {"name": name, "version": "1.0.0", "transport": "stdio"},
405
+ "server": {
406
+ "name": name,
407
+ "version": "1.0.0",
408
+ "transport": "stdio",
409
+ "enable_http": enable_http_transport,
410
+ "enable_sse": enable_sse_transport,
411
+ "timeout": transport_timeout,
412
+ "max_request_size": max_request_size,
413
+ "enable_streaming": enable_streaming,
414
+ },
269
415
  "cache": {
270
416
  "enabled": enable_cache,
271
417
  "default_ttl": cache_ttl,
272
418
  "max_size": 128,
419
+ "backend": cache_backend,
420
+ "config": cache_config or {},
273
421
  },
274
422
  "metrics": {
275
423
  "enabled": enable_metrics,
@@ -280,13 +428,40 @@ class EnhancedMCPServer:
280
428
  "enabled": enable_formatting,
281
429
  "default_format": "markdown",
282
430
  },
431
+ "monitoring": {
432
+ "enabled": enable_monitoring,
433
+ "health_checks": enable_monitoring,
434
+ "observability": enable_monitoring,
435
+ },
436
+ "auth": {
437
+ "enabled": auth_provider is not None,
438
+ "provider_type": (
439
+ type(auth_provider).__name__ if auth_provider else None
440
+ ),
441
+ },
442
+ "rate_limiting": rate_limit_config or {},
443
+ "circuit_breaker": circuit_breaker_config or {},
444
+ "discovery": {"enabled": enable_discovery},
445
+ "connection_pool": connection_pool_config or {},
283
446
  }
284
447
  )
285
448
 
449
+ # Initialize authentication manager
450
+ if auth_provider:
451
+ self.auth_manager = AuthManager(
452
+ provider=auth_provider,
453
+ permission_manager=PermissionManager(),
454
+ rate_limiter=RateLimiter(**(rate_limit_config or {})),
455
+ )
456
+ else:
457
+ self.auth_manager = None
458
+
286
459
  # Initialize components
287
460
  self.cache = CacheManager(
288
461
  enabled=self.config.get("cache.enabled", enable_cache),
289
462
  default_ttl=self.config.get("cache.default_ttl", cache_ttl),
463
+ backend=self.config.get("cache.backend", cache_backend),
464
+ config=self.config.get("cache.config", cache_config or {}),
290
465
  )
291
466
 
292
467
  self.metrics = MetricsCollector(
@@ -295,12 +470,30 @@ class EnhancedMCPServer:
295
470
  collect_usage=self.config.get("metrics.collect_usage", True),
296
471
  )
297
472
 
473
+ # Error aggregation
474
+ if error_aggregation:
475
+ self.error_aggregator = ErrorAggregator()
476
+ else:
477
+ self.error_aggregator = None
478
+
479
+ # Circuit breaker for tool calls
480
+ if circuit_breaker_config:
481
+ from .errors import CircuitBreakerRetry
482
+
483
+ self.circuit_breaker = CircuitBreakerRetry(**circuit_breaker_config)
484
+ else:
485
+ self.circuit_breaker = None
486
+
298
487
  # FastMCP server instance (initialized lazily)
299
488
  self._mcp = None
300
489
  self._running = False
490
+ self._active_sessions: Dict[str, Dict[str, Any]] = {}
491
+ self._connection_pools: Dict[str, List[Any]] = {}
301
492
 
302
493
  # Tool registry for management
303
494
  self._tool_registry: Dict[str, Dict[str, Any]] = {}
495
+ self._resource_registry: Dict[str, Dict[str, Any]] = {}
496
+ self._prompt_registry: Dict[str, Dict[str, Any]] = {}
304
497
 
305
498
  def _init_mcp(self):
306
499
  """Initialize FastMCP server."""
@@ -308,41 +501,117 @@ class EnhancedMCPServer:
308
501
  return
309
502
 
310
503
  try:
311
- # Now we can safely import from external mcp.server (no namespace collision)
312
- from mcp.server import FastMCP
313
-
504
+ # Try independent FastMCP package first (when available)
505
+ from fastmcp import FastMCP
314
506
  self._mcp = FastMCP(self.name)
315
507
  logger.info(f"Initialized FastMCP server: {self.name}")
316
- except ImportError as e:
317
- logger.error(
318
- f"FastMCP import failed with: {e}. Details: {type(e).__name__}"
319
- )
320
- logger.error(
321
- "FastMCP not available. Install with: pip install 'mcp[server]'"
322
- )
323
- raise ImportError(
324
- "FastMCP not available. Install with: pip install 'mcp[server]'"
325
- ) from e
508
+ except ImportError as e1:
509
+ logger.warning(f"Independent FastMCP not available: {e1}")
510
+ try:
511
+ # Fallback to official MCP FastMCP (when fixed)
512
+ from mcp.server import FastMCP
513
+ self._mcp = FastMCP(self.name)
514
+ logger.info(f"Initialized official FastMCP server: {self.name}")
515
+ except ImportError as e2:
516
+ logger.warning(f"Official FastMCP not available: {e2}")
517
+ # Final fallback: Create a minimal FastMCP-compatible wrapper
518
+ logger.info(f"Using low-level MCP Server fallback for: {self.name}")
519
+ self._mcp = self._create_fallback_server()
520
+
521
+ def _create_fallback_server(self):
522
+ """Create a fallback server when FastMCP is not available."""
523
+ logger.info("Creating fallback server implementation")
524
+
525
+ class FallbackMCPServer:
526
+ """Minimal FastMCP-compatible server for when FastMCP is unavailable."""
527
+
528
+ def __init__(self, name: str):
529
+ self.name = name
530
+ self._tools = {}
531
+ self._resources = {}
532
+ self._prompts = {}
533
+ logger.info(f"Fallback MCP server '{name}' initialized")
534
+
535
+ def tool(self, *args, **kwargs):
536
+ """Tool decorator that stores tool registration."""
537
+ def decorator(func):
538
+ tool_name = func.__name__
539
+ self._tools[tool_name] = func
540
+ logger.debug(f"Registered fallback tool: {tool_name}")
541
+ return func
542
+ return decorator
543
+
544
+ def resource(self, uri):
545
+ """Resource decorator that stores resource registration."""
546
+ def decorator(func):
547
+ self._resources[uri] = func
548
+ logger.debug(f"Registered fallback resource: {uri}")
549
+ return func
550
+ return decorator
551
+
552
+ def prompt(self, name):
553
+ """Prompt decorator that stores prompt registration."""
554
+ def decorator(func):
555
+ self._prompts[name] = func
556
+ logger.debug(f"Registered fallback prompt: {name}")
557
+ return func
558
+ return decorator
559
+
560
+ def run(self, **kwargs):
561
+ """Placeholder run method."""
562
+ logger.warning(f"Fallback server '{self.name}' run() called - FastMCP features limited")
563
+ logger.info(f"Registered: {len(self._tools)} tools, {len(self._resources)} resources, {len(self._prompts)} prompts")
564
+ # In a real implementation, we would set up low-level MCP protocol here
565
+ raise NotImplementedError(
566
+ "Full MCP protocol not implemented in fallback mode. "
567
+ "Install 'fastmcp>=2.10.0' or wait for official MCP package fix."
568
+ )
569
+
570
+ return FallbackMCPServer(self.name)
326
571
 
327
572
  def tool(
328
573
  self,
329
574
  cache_key: Optional[str] = None,
330
575
  cache_ttl: Optional[int] = None,
331
576
  format_response: Optional[str] = None,
577
+ # Enhanced features
578
+ required_permission: Optional[str] = None,
579
+ required_permissions: Optional[
580
+ List[str]
581
+ ] = None, # Added for backward compatibility
582
+ rate_limit: Optional[Dict[str, Any]] = None,
583
+ enable_circuit_breaker: bool = True,
584
+ timeout: Optional[float] = None,
585
+ retryable: bool = True,
586
+ stream_response: bool = False,
332
587
  ):
333
588
  """
334
- Enhanced tool decorator with optional caching and metrics.
589
+ Enhanced tool decorator with authentication, caching, metrics, and error handling.
335
590
 
336
591
  Args:
337
592
  cache_key: Optional cache key for caching results
338
593
  cache_ttl: Optional TTL override for this tool
339
594
  format_response: Optional response format ("json", "markdown", "table", etc.)
595
+ required_permission: Single required permission for tool access
596
+ required_permissions: List of required permissions (alternative to required_permission)
597
+ rate_limit: Tool-specific rate limiting configuration
598
+ enable_circuit_breaker: Enable circuit breaker for this tool
599
+ timeout: Tool execution timeout in seconds
600
+ retryable: Whether tool failures are retryable
601
+ stream_response: Enable streaming response for large results
340
602
 
341
603
  Returns:
342
604
  Decorated function with enhanced capabilities
343
605
 
344
606
  Example:
345
- @server.tool(cache_key="weather", cache_ttl=600, format_response="markdown")
607
+ @server.tool(
608
+ cache_key="weather",
609
+ cache_ttl=600,
610
+ format_response="markdown",
611
+ required_permission="weather.read",
612
+ rate_limit={"requests_per_minute": 10},
613
+ timeout=30.0
614
+ )
346
615
  async def get_weather(city: str) -> dict:
347
616
  # Expensive API call - will be cached for 10 minutes
348
617
  return await fetch_weather_data(city)
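
The get_weather example above shows registration only. For completeness, a hypothetical caller of an auth-protected tool would supply credentials as keyword arguments, which the wrappers later in this diff extract (via _extract_credentials_from_context) and strip before the underlying function runs. The key name below follows that helper; the values are placeholders:

    import asyncio

    async def main():
        # "mcp_auth" is one of the credential kwargs the enhanced wrapper
        # recognizes; it is removed before get_weather() itself is called.
        weather = await get_weather("Berlin", mcp_auth={"api_key": "secret-key"})
        print(weather)

    asyncio.run(main())
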
@@ -355,15 +624,43 @@ class EnhancedMCPServer:
355
624
  # Get function name for registration
356
625
  tool_name = func.__name__
357
626
 
627
+ # Normalize permissions - support both singular and plural
628
+ normalized_permission = None
629
+ if required_permissions is not None and required_permission is not None:
630
+ raise ValueError(
631
+ "Cannot specify both required_permission and required_permissions"
632
+ )
633
+ elif required_permissions is not None:
634
+ if len(required_permissions) == 1:
635
+ normalized_permission = required_permissions[0]
636
+ elif len(required_permissions) > 1:
637
+ # For now, take the first permission. Future enhancement could support multiple.
638
+ normalized_permission = required_permissions[0]
639
+ logger.warning(
640
+ f"Tool {tool_name}: Multiple permissions specified, using first: {normalized_permission}"
641
+ )
642
+ elif required_permission is not None:
643
+ normalized_permission = required_permission
644
+
358
645
  # Create enhanced wrapper
359
646
  enhanced_func = self._create_enhanced_tool(
360
- func, tool_name, cache_key, cache_ttl, format_response
647
+ func,
648
+ tool_name,
649
+ cache_key,
650
+ cache_ttl,
651
+ format_response,
652
+ normalized_permission,
653
+ rate_limit,
654
+ enable_circuit_breaker,
655
+ timeout,
656
+ retryable,
657
+ stream_response,
361
658
  )
362
659
 
363
660
  # Register with FastMCP
364
661
  mcp_tool = self._mcp.tool()(enhanced_func)
365
662
 
366
- # Track in registry
663
+ # Track in registry with enhanced metadata
367
664
  self._tool_registry[tool_name] = {
368
665
  "function": mcp_tool,
369
666
  "original_function": func,
@@ -371,10 +668,22 @@ class EnhancedMCPServer:
371
668
  "cache_key": cache_key,
372
669
  "cache_ttl": cache_ttl,
373
670
  "format_response": format_response,
671
+ "required_permission": normalized_permission,
672
+ "rate_limit": rate_limit,
673
+ "enable_circuit_breaker": enable_circuit_breaker,
674
+ "timeout": timeout,
675
+ "retryable": retryable,
676
+ "stream_response": stream_response,
677
+ "call_count": 0,
678
+ "error_count": 0,
679
+ "last_called": None,
374
680
  }
375
681
 
376
682
  logger.debug(
377
- f"Registered tool: {tool_name} (cached: {cache_key is not None})"
683
+ f"Registered enhanced tool: {tool_name} "
684
+ f"(cached: {cache_key is not None}, "
685
+ f"auth: {required_permission is not None}, "
686
+ f"rate_limited: {rate_limit is not None})"
378
687
  )
379
688
  return mcp_tool
380
689
 
@@ -387,19 +696,75 @@ class EnhancedMCPServer:
387
696
  cache_key: Optional[str],
388
697
  cache_ttl: Optional[int],
389
698
  response_format: Optional[str],
699
+ required_permission: Optional[str],
700
+ rate_limit: Optional[Dict[str, Any]],
701
+ enable_circuit_breaker: bool,
702
+ timeout: Optional[float],
703
+ retryable: bool,
704
+ stream_response: bool,
390
705
  ) -> F:
391
- """Create enhanced tool function with caching, metrics, and formatting."""
706
+ """Create enhanced tool function with authentication, caching, metrics, error handling, and more."""
392
707
 
393
708
  @functools.wraps(func)
394
709
  def sync_wrapper(*args, **kwargs):
395
- # Apply metrics tracking
396
- start_time = None
397
- if self.metrics.enabled:
398
- import time
399
-
400
- start_time = time.time()
710
+ # Generate session ID for tracking
711
+ session_id = str(uuid.uuid4())
712
+ start_time = time.time() if self.metrics.enabled else None
401
713
 
402
714
  try:
715
+ # Authentication check
716
+ if self.auth_manager and required_permission:
717
+ # Extract credentials from kwargs or context
718
+ credentials = self._extract_credentials_from_context(kwargs)
719
+ try:
720
+ user_info = self.auth_manager.authenticate_and_authorize(
721
+ credentials, required_permission
722
+ )
723
+ # Add user info to session
724
+ self._active_sessions[session_id] = {
725
+ "user": user_info,
726
+ "tool": tool_name,
727
+ "start_time": start_time,
728
+ "permission": required_permission,
729
+ }
730
+ except (AuthenticationError, AuthorizationError) as e:
731
+ if self.error_aggregator:
732
+ self.error_aggregator.record_error(e)
733
+ raise ToolError(
734
+ f"Access denied for {tool_name}: {str(e)}",
735
+ tool_name=tool_name,
736
+ )
737
+
738
+ # Rate limiting check
739
+ if rate_limit and self.auth_manager:
740
+ user_id = (
741
+ self._active_sessions.get(session_id, {})
742
+ .get("user", {})
743
+ .get("id", "anonymous")
744
+ )
745
+ try:
746
+ self.auth_manager.rate_limiter.check_rate_limit(
747
+ user_id, tool_name, **rate_limit
748
+ )
749
+ except RateLimitError as e:
750
+ if self.error_aggregator:
751
+ self.error_aggregator.record_error(e)
752
+ raise
753
+
754
+ # Circuit breaker check
755
+ if enable_circuit_breaker and self.circuit_breaker:
756
+ if not self.circuit_breaker.should_retry(
757
+ MCPError("Circuit breaker check"), 1
758
+ ):
759
+ error = MCPError(
760
+ f"Circuit breaker open for {tool_name}",
761
+ error_code=MCPErrorCode.CIRCUIT_BREAKER_OPEN,
762
+ retryable=True,
763
+ )
764
+ if self.error_aggregator:
765
+ self.error_aggregator.record_error(error)
766
+ raise error
767
+
403
768
  # Try cache first if enabled
404
769
  if cache_key and self.cache.enabled:
405
770
  cache = self.cache.get_cache(cache_key, ttl=cache_ttl)
@@ -407,20 +772,77 @@ class EnhancedMCPServer:
407
772
  tool_name, args, kwargs
408
773
  )
409
774
 
410
- result = cache.get(cache_lookup_key)
775
+ # For sync functions with Redis, we need to handle async operations
776
+ if cache.is_redis:
777
+ # Try to run async cache operations in sync context
778
+ try:
779
+ # Check if we're already in an async context
780
+ try:
781
+ asyncio.get_running_loop()
782
+ # We're in an async context, but this is a sync function
783
+ # Fall back to memory cache behavior (no caching for now)
784
+ result = None
785
+ except RuntimeError:
786
+ # Not in async context, we can use asyncio.run
787
+ result = asyncio.run(cache.aget(cache_lookup_key))
788
+ except Exception as e:
789
+ logger.debug(f"Redis cache error in sync context: {e}")
790
+ result = None
791
+ else:
792
+ result = cache.get(cache_lookup_key)
793
+
411
794
  if result is not None:
412
795
  logger.debug(f"Cache hit for {tool_name}")
413
796
  if self.metrics.enabled:
414
797
  latency = time.time() - start_time
415
798
  self.metrics.track_tool_call(tool_name, latency, True)
416
- return self._format_response(result, response_format)
417
799
 
418
- # Execute function
419
- result = func(*args, **kwargs)
800
+ # Update registry stats
801
+ self._tool_registry[tool_name]["call_count"] += 1
802
+ self._tool_registry[tool_name]["last_called"] = time.time()
803
+
804
+ return self._format_response(
805
+ result, response_format, stream_response
806
+ )
807
+
808
+ # Execute function with timeout
809
+ if timeout:
810
+ import signal
811
+
812
+ def timeout_handler(signum, frame):
813
+ raise TimeoutError(
814
+ f"Tool {tool_name} timed out after {timeout}s"
815
+ )
816
+
817
+ old_handler = signal.signal(signal.SIGALRM, timeout_handler)
818
+ signal.alarm(int(timeout))
819
+
820
+ try:
821
+ result = func(*args, **kwargs)
822
+ finally:
823
+ signal.alarm(0)
824
+ signal.signal(signal.SIGALRM, old_handler)
825
+ else:
826
+ result = func(*args, **kwargs)
420
827
 
421
828
  # Cache result if enabled
422
829
  if cache_key and self.cache.enabled:
423
- cache.set(cache_lookup_key, result)
830
+ # For sync functions with Redis, handle async operations
831
+ if cache.is_redis:
832
+ try:
833
+ # Check if we're already in an async context
834
+ try:
835
+ asyncio.get_running_loop()
836
+ # We're in an async context, but this is a sync function
837
+ # Fall back to memory cache behavior (no caching for now)
838
+ pass
839
+ except RuntimeError:
840
+ # Not in async context, we can use asyncio.run
841
+ asyncio.run(cache.aset(cache_lookup_key, result))
842
+ except Exception as e:
843
+ logger.debug(f"Redis cache set error in sync context: {e}")
844
+ else:
845
+ cache.set(cache_lookup_key, result)
424
846
  logger.debug(f"Cached result for {tool_name}")
425
847
 
426
848
  # Track success metrics
@@ -428,9 +850,36 @@ class EnhancedMCPServer:
428
850
  latency = time.time() - start_time
429
851
  self.metrics.track_tool_call(tool_name, latency, True)
430
852
 
431
- return self._format_response(result, response_format)
853
+ # Update circuit breaker on success
854
+ if enable_circuit_breaker and self.circuit_breaker:
855
+ self.circuit_breaker.on_success()
856
+
857
+ # Update registry stats
858
+ self._tool_registry[tool_name]["call_count"] += 1
859
+ self._tool_registry[tool_name]["last_called"] = time.time()
860
+
861
+ return self._format_response(result, response_format, stream_response)
432
862
 
433
863
  except Exception as e:
864
+ # Convert to MCP error if needed
865
+ if not isinstance(e, MCPError):
866
+ mcp_error = ToolError(
867
+ f"Tool execution failed: {str(e)}",
868
+ tool_name=tool_name,
869
+ retryable=retryable,
870
+ cause=e,
871
+ )
872
+ else:
873
+ mcp_error = e
874
+
875
+ # Record error
876
+ if self.error_aggregator:
877
+ self.error_aggregator.record_error(mcp_error)
878
+
879
+ # Update circuit breaker on failure
880
+ if enable_circuit_breaker and self.circuit_breaker:
881
+ self.circuit_breaker.on_failure(mcp_error)
882
+
434
883
  # Track error metrics
435
884
  if self.metrics.enabled and start_time:
436
885
  latency = time.time() - start_time
@@ -438,50 +887,188 @@ class EnhancedMCPServer:
438
887
  tool_name, latency, False, type(e).__name__
439
888
  )
440
889
 
441
- logger.error(f"Error in tool {tool_name}: {e}")
442
- raise
890
+ # Update registry stats
891
+ self._tool_registry[tool_name]["error_count"] += 1
892
+ self._tool_registry[tool_name]["last_called"] = time.time()
893
+
894
+ logger.error(f"Error in tool {tool_name}: {mcp_error}")
895
+ raise mcp_error
896
+
897
+ finally:
898
+ # Clean up session
899
+ if session_id in self._active_sessions:
900
+ del self._active_sessions[session_id]
443
901
 
444
902
  @functools.wraps(func)
445
903
  async def async_wrapper(*args, **kwargs):
446
- # Apply metrics tracking
447
- start_time = None
448
- if self.metrics.enabled:
449
- import time
450
-
451
- start_time = time.time()
904
+ # Generate session ID for tracking
905
+ session_id = str(uuid.uuid4())
906
+ start_time = time.time() if self.metrics.enabled else None
452
907
 
453
908
  try:
454
- # Try cache first if enabled
909
+ # Authentication check
910
+ if self.auth_manager and required_permission:
911
+ # Extract credentials from kwargs or context
912
+ credentials = self._extract_credentials_from_context(kwargs)
913
+
914
+ # Allow bypassing auth for direct calls when no credentials provided
915
+ # This enables testing and development scenarios
916
+ if not credentials and not any(
917
+ k.startswith("mcp_") for k in kwargs.keys()
918
+ ):
919
+ logger.debug(
920
+ f"Tool {tool_name}: No credentials provided, allowing direct call (development/testing)"
921
+ )
922
+ user_info = None
923
+ else:
924
+ try:
925
+ user_info = self.auth_manager.authenticate_and_authorize(
926
+ credentials, required_permission
927
+ )
928
+ # Add user info to session
929
+ self._active_sessions[session_id] = {
930
+ "user": user_info,
931
+ "tool": tool_name,
932
+ "start_time": start_time,
933
+ "permission": required_permission,
934
+ }
935
+ except (AuthenticationError, AuthorizationError) as e:
936
+ if self.error_aggregator:
937
+ self.error_aggregator.record_error(e)
938
+ raise ToolError(
939
+ f"Access denied for {tool_name}: {str(e)}",
940
+ tool_name=tool_name,
941
+ )
942
+
943
+ # Rate limiting check
944
+ if rate_limit and self.auth_manager:
945
+ user_id = (
946
+ self._active_sessions.get(session_id, {})
947
+ .get("user", {})
948
+ .get("id", "anonymous")
949
+ )
950
+ try:
951
+ self.auth_manager.rate_limiter.check_rate_limit(
952
+ user_id, tool_name, **rate_limit
953
+ )
954
+ except RateLimitError as e:
955
+ if self.error_aggregator:
956
+ self.error_aggregator.record_error(e)
957
+ raise
958
+
959
+ # Circuit breaker check
960
+ if enable_circuit_breaker and self.circuit_breaker:
961
+ if not self.circuit_breaker.should_retry(
962
+ MCPError("Circuit breaker check"), 1
963
+ ):
964
+ error = MCPError(
965
+ f"Circuit breaker open for {tool_name}",
966
+ error_code=MCPErrorCode.CIRCUIT_BREAKER_OPEN,
967
+ retryable=True,
968
+ )
969
+ if self.error_aggregator:
970
+ self.error_aggregator.record_error(error)
971
+ raise error
972
+
973
+ # Execute with caching and stampede prevention if enabled
455
974
  if cache_key and self.cache.enabled:
456
975
  cache = self.cache.get_cache(cache_key, ttl=cache_ttl)
457
976
  cache_lookup_key = self.cache._create_cache_key(
458
977
  tool_name, args, kwargs
459
978
  )
460
979
 
461
- result = cache.get(cache_lookup_key)
462
- if result is not None:
463
- logger.debug(f"Cache hit for {tool_name}")
464
- if self.metrics.enabled:
465
- latency = time.time() - start_time
466
- self.metrics.track_tool_call(tool_name, latency, True)
467
- return self._format_response(result, response_format)
468
-
469
- # Execute function
470
- result = await func(*args, **kwargs)
471
-
472
- # Cache result if enabled
473
- if cache_key and self.cache.enabled:
474
- cache.set(cache_lookup_key, result)
475
- logger.debug(f"Cached result for {tool_name}")
980
+ # Define the compute function for cache-or-compute
981
+ async def compute_result():
982
+ # Filter out auth credentials from kwargs before calling the function
983
+ clean_kwargs = {
984
+ k: v
985
+ for k, v in kwargs.items()
986
+ if k
987
+ not in [
988
+ "api_key",
989
+ "token",
990
+ "username",
991
+ "password",
992
+ "jwt",
993
+ "authorization",
994
+ "mcp_auth",
995
+ ]
996
+ }
997
+
998
+ # Execute function with timeout
999
+ if timeout:
1000
+ return await asyncio.wait_for(
1001
+ func(*args, **clean_kwargs), timeout=timeout
1002
+ )
1003
+ else:
1004
+ return await func(*args, **clean_kwargs)
1005
+
1006
+ # Use cache-or-compute with stampede prevention
1007
+ result = await cache.get_or_compute(
1008
+ cache_lookup_key, compute_result, cache_ttl
1009
+ )
1010
+ logger.debug(f"Got result for {tool_name} (cached or computed)")
1011
+ else:
1012
+ # No caching - execute directly
1013
+ # Filter out auth credentials from kwargs before calling the function
1014
+ clean_kwargs = {
1015
+ k: v
1016
+ for k, v in kwargs.items()
1017
+ if k
1018
+ not in [
1019
+ "api_key",
1020
+ "token",
1021
+ "username",
1022
+ "password",
1023
+ "jwt",
1024
+ "authorization",
1025
+ "mcp_auth",
1026
+ ]
1027
+ }
1028
+
1029
+ # Execute function with timeout
1030
+ if timeout:
1031
+ result = await asyncio.wait_for(
1032
+ func(*args, **clean_kwargs), timeout=timeout
1033
+ )
1034
+ else:
1035
+ result = await func(*args, **clean_kwargs)
476
1036
 
477
1037
  # Track success metrics
478
1038
  if self.metrics.enabled:
479
1039
  latency = time.time() - start_time
480
1040
  self.metrics.track_tool_call(tool_name, latency, True)
481
1041
 
482
- return self._format_response(result, response_format)
1042
+ # Update circuit breaker on success
1043
+ if enable_circuit_breaker and self.circuit_breaker:
1044
+ self.circuit_breaker.on_success()
1045
+
1046
+ # Update registry stats
1047
+ self._tool_registry[tool_name]["call_count"] += 1
1048
+ self._tool_registry[tool_name]["last_called"] = time.time()
1049
+
1050
+ return self._format_response(result, response_format, stream_response)
483
1051
 
484
1052
  except Exception as e:
1053
+ # Convert to MCP error if needed
1054
+ if not isinstance(e, MCPError):
1055
+ mcp_error = ToolError(
1056
+ f"Tool execution failed: {str(e)}",
1057
+ tool_name=tool_name,
1058
+ retryable=retryable,
1059
+ cause=e,
1060
+ )
1061
+ else:
1062
+ mcp_error = e
1063
+
1064
+ # Record error
1065
+ if self.error_aggregator:
1066
+ self.error_aggregator.record_error(mcp_error)
1067
+
1068
+ # Update circuit breaker on failure
1069
+ if enable_circuit_breaker and self.circuit_breaker:
1070
+ self.circuit_breaker.on_failure(mcp_error)
1071
+
485
1072
  # Track error metrics
486
1073
  if self.metrics.enabled and start_time:
487
1074
  latency = time.time() - start_time
@@ -489,8 +1076,17 @@ class EnhancedMCPServer:
489
1076
  tool_name, latency, False, type(e).__name__
490
1077
  )
491
1078
 
492
- logger.error(f"Error in tool {tool_name}: {e}")
493
- raise
1079
+ # Update registry stats
1080
+ self._tool_registry[tool_name]["error_count"] += 1
1081
+ self._tool_registry[tool_name]["last_called"] = time.time()
1082
+
1083
+ logger.error(f"Error in tool {tool_name}: {mcp_error}")
1084
+ raise mcp_error
1085
+
1086
+ finally:
1087
+ # Clean up session
1088
+ if session_id in self._active_sessions:
1089
+ del self._active_sessions[session_id]
494
1090
 
495
1091
  # Return appropriate wrapper based on function type
496
1092
  if asyncio.iscoroutinefunction(func):
@@ -498,17 +1094,86 @@ class EnhancedMCPServer:
498
1094
  else:
499
1095
  return sync_wrapper
500
1096
 
501
- def _format_response(self, result: Any, response_format: Optional[str]) -> Any:
502
- """Format response if formatting is enabled."""
1097
+ def _format_response(
1098
+ self, result: Any, response_format: Optional[str], stream_response: bool = False
1099
+ ) -> Any:
1100
+ """Format response if formatting is enabled, with optional streaming support."""
503
1101
  if not self.config.get("formatting.enabled", True) or not response_format:
1102
+ if (
1103
+ stream_response
1104
+ and isinstance(result, (list, dict))
1105
+ and len(str(result)) > 1000
1106
+ ):
1107
+ # For large results, consider streaming (simplified implementation)
1108
+ return {
1109
+ "streaming": True,
1110
+ "data": result,
1111
+ "chunks": self._chunk_large_response(result),
1112
+ }
504
1113
  return result
505
1114
 
506
1115
  try:
507
- return format_response(result, response_format)
1116
+ formatted = format_response(result, response_format)
1117
+ if stream_response and isinstance(formatted, str) and len(formatted) > 1000:
1118
+ return {
1119
+ "streaming": True,
1120
+ "data": formatted,
1121
+ "chunks": self._chunk_large_response(formatted),
1122
+ }
1123
+ return formatted
508
1124
  except Exception as e:
509
1125
  logger.warning(f"Failed to format response: {e}")
510
1126
  return result
511
1127
 
1128
+ def _chunk_large_response(self, data: Any, chunk_size: int = 1000) -> List[str]:
1129
+ """Chunk large responses for streaming."""
1130
+ if isinstance(data, str):
1131
+ return [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)]
1132
+ elif isinstance(data, (list, dict)):
1133
+ data_str = str(data)
1134
+ return [
1135
+ data_str[i : i + chunk_size]
1136
+ for i in range(0, len(data_str), chunk_size)
1137
+ ]
1138
+ else:
1139
+ return [str(data)]
1140
+
1141
+ def _extract_credentials_from_context(
1142
+ self, kwargs: Dict[str, Any]
1143
+ ) -> Dict[str, Any]:
1144
+ """Extract credentials from function context or kwargs."""
1145
+ # Look for common credential patterns in kwargs
1146
+ credentials = {}
1147
+
1148
+ # Check for MCP-style authentication headers
1149
+ if "mcp_auth" in kwargs:
1150
+ credentials.update(kwargs["mcp_auth"])
1151
+
1152
+ # Check for common auth patterns
1153
+ auth_fields = ["api_key", "token", "username", "password", "jwt"]
1154
+ for field in auth_fields:
1155
+ if field in kwargs:
1156
+ credentials[field] = kwargs[field]
1157
+
1158
+ # Check for Authorization header pattern
1159
+ if "authorization" in kwargs:
1160
+ auth_header = kwargs["authorization"]
1161
+ if auth_header.startswith("Bearer "):
1162
+ credentials["token"] = auth_header[7:]
1163
+ elif auth_header.startswith("Basic "):
1164
+ import base64
1165
+
1166
+ try:
1167
+ decoded = base64.b64decode(auth_header[6:]).decode()
1168
+ if ":" in decoded:
1169
+ username, password = decoded.split(":", 1)
1170
+ credentials["username"] = username
1171
+ credentials["password"] = password
1172
+ except Exception:
1173
+ pass
1174
+
1175
+ return credentials
1176
+
512
1177
  def resource(self, uri: str):
513
1178
  """
514
1179
  Add resource with metrics tracking.
@@ -579,8 +1244,24 @@ class EnhancedMCPServer:
579
1244
  "name": self.name,
580
1245
  "running": self._running,
581
1246
  "config": self.config.to_dict(),
1247
+ "active_sessions": len(self._active_sessions),
1248
+ "transport": {
1249
+ "http_enabled": self.enable_http_transport,
1250
+ "sse_enabled": self.enable_sse_transport,
1251
+ "streaming_enabled": self.enable_streaming,
1252
+ "timeout": self.transport_timeout,
1253
+ "max_request_size": self.max_request_size,
1254
+ },
1255
+ "features": {
1256
+ "auth_enabled": self.auth_manager is not None,
1257
+ "circuit_breaker_enabled": self.circuit_breaker is not None,
1258
+ "error_aggregation_enabled": self.error_aggregator is not None,
1259
+ "discovery_enabled": self.enable_discovery,
1260
+ },
582
1261
  },
583
1262
  "tools": self.get_tool_stats(),
1263
+ "resources": self.get_resource_stats(),
1264
+ "prompts": self.get_prompt_stats(),
584
1265
  }
585
1266
 
586
1267
  if self.metrics.enabled:
@@ -589,8 +1270,127 @@ class EnhancedMCPServer:
589
1270
  if self.cache.enabled:
590
1271
  stats["cache"] = self.cache.stats()
591
1272
 
1273
+ if self.error_aggregator:
1274
+ stats["errors"] = self.error_aggregator.get_error_stats(
1275
+ time_window=3600
1276
+ ) # Last hour
1277
+
1278
+ if self.circuit_breaker:
1279
+ stats["circuit_breaker"] = {
1280
+ "state": self.circuit_breaker.state,
1281
+ "failure_count": self.circuit_breaker.failure_count,
1282
+ "success_count": self.circuit_breaker.success_count,
1283
+ }
1284
+
592
1285
  return stats
593
1286
 
1287
+ def get_resource_stats(self) -> Dict[str, Any]:
1288
+ """Get resource statistics."""
1289
+ return {
1290
+ "registered_resources": len(self._resource_registry),
1291
+ "resources": {
1292
+ uri: {
1293
+ "call_count": info.get("call_count", 0),
1294
+ "error_count": info.get("error_count", 0),
1295
+ "last_accessed": info.get("last_accessed"),
1296
+ }
1297
+ for uri, info in self._resource_registry.items()
1298
+ },
1299
+ }
1300
+
1301
+ def get_prompt_stats(self) -> Dict[str, Any]:
1302
+ """Get prompt statistics."""
1303
+ return {
1304
+ "registered_prompts": len(self._prompt_registry),
1305
+ "prompts": {
1306
+ name: {
1307
+ "call_count": info.get("call_count", 0),
1308
+ "error_count": info.get("error_count", 0),
1309
+ "last_used": info.get("last_used"),
1310
+ }
1311
+ for name, info in self._prompt_registry.items()
1312
+ },
1313
+ }
1314
+
1315
+ def get_active_sessions(self) -> Dict[str, Dict[str, Any]]:
1316
+ """Get information about active sessions."""
1317
+ return {
1318
+ session_id: {
1319
+ "user": session_info.get("user", {}),
1320
+ "tool": session_info.get("tool"),
1321
+ "permission": session_info.get("permission"),
1322
+ "duration": time.time() - session_info.get("start_time", time.time()),
1323
+ }
1324
+ for session_id, session_info in self._active_sessions.items()
1325
+ }
1326
+
1327
+ def get_error_trends(
1328
+ self, time_window: Optional[float] = None
1329
+ ) -> List[Dict[str, Any]]:
1330
+ """Get error trends over time."""
1331
+ if not self.error_aggregator:
1332
+ return []
1333
+ return self.error_aggregator.get_error_trends()
1334
+
1335
+ def health_check(self) -> Dict[str, Any]:
1336
+ """Perform comprehensive health check."""
1337
+ health_status = {
1338
+ "status": "healthy",
1339
+ "timestamp": time.time(),
1340
+ "server": {
1341
+ "name": self.name,
1342
+ "running": self._running,
1343
+ "uptime": time.time()
1344
+ - self.config.get("server.start_time", time.time()),
1345
+ },
1346
+ "components": {
1347
+ "mcp": self._mcp is not None,
1348
+ "cache": self.cache.enabled if self.cache else False,
1349
+ "metrics": self.metrics.enabled if self.metrics else False,
1350
+ "auth": self.auth_manager is not None,
1351
+ "circuit_breaker": self.circuit_breaker is not None,
1352
+ },
1353
+ "resources": {
1354
+ "active_sessions": len(self._active_sessions),
1355
+ "tools_registered": len(self._tool_registry),
1356
+ "resources_registered": len(self._resource_registry),
1357
+ "prompts_registered": len(self._prompt_registry),
1358
+ },
1359
+ }
1360
+
1361
+ # Check for issues
1362
+ issues = []
1363
+
1364
+ # Check error rates
1365
+ if self.error_aggregator:
1366
+ error_stats = self.error_aggregator.get_error_stats(
1367
+ time_window=300
1368
+ ) # Last 5 minutes
1369
+ if error_stats.get("error_rate", 0) > 10: # More than 10 errors per second
1370
+ issues.append("High error rate detected")
1371
+ health_status["status"] = "degraded"
1372
+
1373
+ # Check circuit breaker state
1374
+ if self.circuit_breaker and self.circuit_breaker.state == "open":
1375
+ issues.append("Circuit breaker is open")
1376
+ health_status["status"] = "degraded"
1377
+
1378
+ # Check memory usage for caches
1379
+ if self.cache and self.cache.enabled:
1380
+ cache_stats = self.cache.stats()
1381
+ # Simple heuristic - if any cache is over 90% full
1382
+ for cache_name, stats in cache_stats.items():
1383
+ if isinstance(stats, dict) and stats.get("utilization", 0) > 0.9:
1384
+ issues.append(f"Cache {cache_name} is over 90% full")
1385
+ health_status["status"] = "degraded"
1386
+
1387
+ health_status["issues"] = issues
1388
+
1389
+ if issues and health_status["status"] == "healthy":
1390
+ health_status["status"] = "degraded"
1391
+
1392
+ return health_status
1393
+
594
1394
  def clear_cache(self, cache_name: Optional[str] = None) -> None:
595
1395
  """Clear cache(s)."""
596
1396
  if cache_name:
@@ -601,32 +1401,347 @@ class EnhancedMCPServer:
601
1401
  self.cache.clear_all()
602
1402
  logger.info("Cleared all caches")
603
1403
 
1404
+ def reset_circuit_breaker(self) -> None:
1405
+ """Reset circuit breaker to closed state."""
1406
+ if self.circuit_breaker:
1407
+ self.circuit_breaker.state = "closed"
1408
+ self.circuit_breaker.failure_count = 0
1409
+ self.circuit_breaker.success_count = 0
1410
+ logger.info("Circuit breaker reset to closed state")
1411
+
1412
+ def terminate_session(self, session_id: str) -> bool:
1413
+ """Terminate an active session."""
1414
+ if session_id in self._active_sessions:
1415
+ del self._active_sessions[session_id]
1416
+ logger.info(f"Terminated session: {session_id}")
1417
+ return True
1418
+ return False
1419
+
1420
+ def get_tool_by_name(self, tool_name: str) -> Optional[Dict[str, Any]]:
1421
+ """Get tool information by name."""
1422
+ return self._tool_registry.get(tool_name)
1423
+
1424
+ def disable_tool(self, tool_name: str) -> bool:
1425
+ """Temporarily disable a tool."""
1426
+ if tool_name in self._tool_registry:
1427
+ self._tool_registry[tool_name]["disabled"] = True
1428
+ logger.info(f"Disabled tool: {tool_name}")
1429
+ return True
1430
+ return False
1431
+
1432
+ def enable_tool(self, tool_name: str) -> bool:
1433
+ """Re-enable a disabled tool."""
1434
+ if tool_name in self._tool_registry:
1435
+ self._tool_registry[tool_name]["disabled"] = False
1436
+ logger.info(f"Enabled tool: {tool_name}")
1437
+ return True
1438
+ return False
1439
+
604
1440
  def run(self):
605
- """Run the MCP server."""
1441
+ """Run the enhanced MCP server with all features."""
606
1442
  if self._mcp is None:
607
1443
  self._init_mcp()
608
1444
 
1445
+ # Record server start time
1446
+ self.config.update({"server.start_time": time.time()})
1447
+
1448
+ # Log enhanced server startup
609
1449
  logger.info(f"Starting enhanced MCP server: {self.name}")
610
- logger.info(f"Cache enabled: {self.cache.enabled}")
611
- logger.info(f"Metrics enabled: {self.metrics.enabled}")
1450
+ logger.info("Features enabled:")
1451
+ logger.info(f" - Cache: {self.cache.enabled if self.cache else False}")
1452
+ logger.info(f" - Metrics: {self.metrics.enabled if self.metrics else False}")
1453
+ logger.info(f" - Authentication: {self.auth_manager is not None}")
1454
+ logger.info(f" - HTTP Transport: {self.enable_http_transport}")
1455
+ logger.info(f" - SSE Transport: {self.enable_sse_transport}")
1456
+ logger.info(f" - Streaming: {self.enable_streaming}")
1457
+ logger.info(f" - Circuit Breaker: {self.circuit_breaker is not None}")
1458
+ logger.info(f" - Error Aggregation: {self.error_aggregator is not None}")
1459
+ logger.info(f" - Service Discovery: {self.enable_discovery}")
1460
+
1461
+ logger.info("Server configuration:")
1462
+ logger.info(f" - Tools registered: {len(self._tool_registry)}")
1463
+ logger.info(f" - Resources registered: {len(self._resource_registry)}")
1464
+ logger.info(f" - Prompts registered: {len(self._prompt_registry)}")
1465
+ logger.info(f" - Transport timeout: {self.transport_timeout}s")
1466
+ logger.info(f" - Max request size: {self.max_request_size} bytes")
612
1467
 
613
1468
  self._running = True
614
1469
 
615
1470
  try:
1471
+ # Perform health check before starting
1472
+ health = self.health_check()
1473
+ if health["status"] != "healthy":
1474
+ logger.warning(f"Server health check shows issues: {health['issues']}")
1475
+
1476
+ # Run the FastMCP server
1477
+ logger.info("Starting FastMCP server...")
616
1478
  self._mcp.run()
1479
+
617
1480
  except KeyboardInterrupt:
618
1481
  logger.info("Server stopped by user")
619
1482
  except Exception as e:
620
1483
  logger.error(f"Server error: {e}")
1484
+
1485
+ # Record error if aggregator is enabled
1486
+ if self.error_aggregator:
1487
+ error = MCPError(
1488
+ f"Server startup/runtime error: {str(e)}",
1489
+ error_code=MCPErrorCode.SERVER_UNAVAILABLE,
1490
+ cause=e,
1491
+ )
1492
+ self.error_aggregator.record_error(error)
1493
+
621
1494
  raise
622
1495
  finally:
1496
+ logger.info("Shutting down enhanced MCP server...")
1497
+
1498
+ # Clean up active sessions
1499
+ if self._active_sessions:
1500
+ logger.info(f"Terminating {len(self._active_sessions)} active sessions")
1501
+ self._active_sessions.clear()
1502
+
1503
+ # Log final stats
1504
+ if self.metrics and self.metrics.enabled:
1505
+ final_stats = self.get_server_stats()
1506
+ logger.info(
1507
+ f"Final server statistics: {final_stats.get('metrics', {})}"
1508
+ )
1509
+
623
1510
  self._running = False
1511
+ logger.info(f"Enhanced MCP server '{self.name}' stopped")
1512
+
1513
+ async def run_stdio(self):
1514
+ """Run the server using stdio transport for testing."""
1515
+ if self._mcp is None:
1516
+ self._init_mcp()
1517
+
1518
+ # For testing, we'll implement a simple stdio server
1519
+ import json
1520
+ import sys
1521
+
1522
+ logger.info(f"Starting MCP server '{self.name}' in stdio mode")
1523
+ self._running = True
1524
+
1525
+ try:
1526
+ while self._running:
1527
+ # Read JSON-RPC request from stdin
1528
+ line = sys.stdin.readline()
1529
+ if not line:
1530
+ break
1531
+
1532
+ try:
1533
+ request = json.loads(line.strip())
1534
+
1535
+ # Handle different request types
1536
+ if request.get("method") == "tools/list":
1537
+ # Return list of tools
1538
+ tools = []
1539
+ for name, info in self._tool_registry.items():
1540
+ if not info.get("disabled", False):
1541
+ tools.append(
1542
+ {
1543
+ "name": name,
1544
+ "description": info.get("description", ""),
1545
+ "inputSchema": info.get("input_schema", {}),
1546
+ }
1547
+ )
1548
+
1549
+ response = {"id": request.get("id"), "result": {"tools": tools}}
1550
+
1551
+ elif request.get("method") == "tools/call":
1552
+ # Call a tool
1553
+ params = request.get("params", {})
1554
+ tool_name = params.get("name")
1555
+ arguments = params.get("arguments", {})
1556
+
1557
+ if tool_name in self._tool_registry:
1558
+ handler = self._tool_registry[tool_name]["handler"]
1559
+ try:
1560
+ # Execute tool
1561
+ if asyncio.iscoroutinefunction(handler):
1562
+ result = await handler(**arguments)
1563
+ else:
1564
+ result = handler(**arguments)
1565
+
1566
+ response = {
1567
+ "id": request.get("id"),
1568
+ "result": {
1569
+ "content": [
1570
+ {"type": "text", "text": str(result)}
1571
+ ]
1572
+ },
1573
+ }
1574
+ except Exception as e:
1575
+ response = {
1576
+ "id": request.get("id"),
1577
+ "error": {"code": -32603, "message": str(e)},
1578
+ }
1579
+ else:
1580
+ response = {
1581
+ "id": request.get("id"),
1582
+ "error": {
1583
+ "code": -32601,
1584
+ "message": f"Tool not found: {tool_name}",
1585
+ },
1586
+ }
1587
+
1588
+ else:
1589
+ # Unknown method
1590
+ response = {
1591
+ "id": request.get("id"),
1592
+ "error": {
1593
+ "code": -32601,
1594
+ "message": f"Method not found: {request.get('method')}",
1595
+ },
1596
+ }
1597
+
1598
+ # Write response to stdout
1599
+ sys.stdout.write(json.dumps(response) + "\n")
1600
+ sys.stdout.flush()
1601
+
1602
+ except json.JSONDecodeError:
1603
+ # Invalid JSON
1604
+ error_response = {
1605
+ "id": None,
1606
+ "error": {"code": -32700, "message": "Parse error"},
1607
+ }
1608
+ sys.stdout.write(json.dumps(error_response) + "\n")
1609
+ sys.stdout.flush()
1610
+
1611
+ except KeyboardInterrupt:
1612
+ logger.info("Server stopped by user")
1613
+ except Exception as e:
1614
+ logger.error(f"Server error: {e}")
1615
+ raise
1616
+ finally:
1617
+ self._running = False
1618
+
1619
+
1620
+ class SimpleMCPServer(MCPServerBase):
1621
+ """Simple MCP Server for prototyping and development.
1622
+
1623
+ This is a lightweight version of MCPServer without authentication,
1624
+ metrics, caching, or other production features. Perfect for:
1625
+ - Quick prototyping
1626
+ - Development and testing
1627
+ - Simple use cases without advanced features
1628
+
1629
+ Example:
1630
+ >>> server = SimpleMCPServer("my-prototype")
1631
+ >>> @server.tool()
1632
+ ... def hello(name: str) -> str:
1633
+ ... return f"Hello, {name}!"
1634
+ >>> server.run()
1635
+ """
1636
+
1637
+ def __init__(self, name: str, description: str = None):
1638
+ """Initialize simple MCP server.
1639
+
1640
+ Args:
1641
+ name: Server name
1642
+ description: Server description
1643
+ """
1644
+ super().__init__(name, description)
1645
+
1646
+ # Disable all advanced features for simplicity
1647
+ self.enable_cache = False
1648
+ self.enable_metrics = False
1649
+ self.enable_http_transport = False
1650
+ self.rate_limit_config = None
1651
+ self.circuit_breaker_config = None
1652
+ self.auth_provider = None
1653
+
1654
+ # Simple in-memory storage
1655
+ self._simple_tools = {}
1656
+ self._simple_resources = {}
1657
+ self._simple_prompts = {}
1658
+
1659
+ logger.info(f"SimpleMCPServer '{name}' initialized for prototyping")
1660
+
1661
+ def setup(self):
1662
+ """Setup method - no additional setup needed for SimpleMCPServer."""
1663
+ pass
1664
+
1665
+ def tool(self, description: str = None):
1666
+ """Register a simple tool (no auth, caching, or metrics).
1667
+
1668
+ Args:
1669
+ description: Tool description
1670
+
1671
+ Returns:
1672
+ Decorator function
1673
+ """
1674
+
1675
+ def decorator(func):
1676
+ # Initialize MCP if needed
1677
+ if self._mcp is None:
1678
+ self._init_mcp()
1679
+
1680
+ tool_name = func.__name__
1681
+ self._simple_tools[tool_name] = {
1682
+ "function": func,
1683
+ "description": description or f"Tool: {tool_name}",
1684
+ "created_at": time.time(),
1685
+ }
1686
+
1687
+ # Register with FastMCP
1688
+ self._mcp.tool(description or f"Tool: {tool_name}")(func)
1689
+
1690
+ logger.debug(f"SimpleMCPServer: Registered tool '{tool_name}'")
1691
+ return func
1692
+
1693
+ return decorator
1694
+
1695
+ def resource(self, uri: str, description: str = None):
1696
+ """Register a simple resource.
1697
+
1698
+ Args:
1699
+ uri: Resource URI
1700
+ description: Resource description
1701
+
1702
+ Returns:
1703
+ Decorator function
1704
+ """
1705
+
1706
+ def decorator(func):
1707
+ # Initialize MCP if needed
1708
+ if self._mcp is None:
1709
+ self._init_mcp()
1710
+
1711
+ self._simple_resources[uri] = {
1712
+ "function": func,
1713
+ "description": description or f"Resource: {uri}",
1714
+ "created_at": time.time(),
1715
+ }
1716
+
1717
+ # Register with FastMCP
1718
+ self._mcp.resource(uri, description or f"Resource: {uri}")(func)
1719
+
1720
+ logger.debug(f"SimpleMCPServer: Registered resource '{uri}'")
1721
+ return func
1722
+
1723
+ return decorator
1724
+
1725
+ def get_stats(self) -> dict:
1726
+ """Get simple server statistics.
1727
+
1728
+ Returns:
1729
+ Dictionary with basic stats
1730
+ """
1731
+ return {
1732
+ "server_name": self.name,
1733
+ "server_type": "SimpleMCPServer",
1734
+ "tools_count": len(self._simple_tools),
1735
+ "resources_count": len(self._simple_resources),
1736
+ "prompts_count": len(self._simple_prompts),
1737
+ "features": {
1738
+ "authentication": False,
1739
+ "caching": False,
1740
+ "metrics": False,
1741
+ "rate_limiting": False,
1742
+ "circuit_breaker": False,
1743
+ },
1744
+ }
624
1745
 
625
1746
 
626
- # Clean public API design:
627
- # - MCPServerBase: Abstract base for custom implementations (e.g., AIRegistryServer)
628
- # - MCPServer: Main concrete server with all production features
629
- # - SimpleMCPServer: Alias for backward compatibility
630
- # - EnhancedMCPServer: Alias for backward compatibility
631
- MCPServer = EnhancedMCPServer
632
- SimpleMCPServer = EnhancedMCPServer
1747
+ # Note: EnhancedMCPServer alias removed - use MCPServer directly
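
For reference, the run_stdio() fallback added near the end of this diff speaks newline-delimited JSON-RPC over stdin/stdout and handles tools/list and tools/call. A rough sketch of driving it from a client process follows (the server script name and tool arguments are hypothetical, and the server side would need to await run_stdio(), for example via asyncio.run):

    import json
    import subprocess

    # Launch a script that calls asyncio.run(server.run_stdio()); path is hypothetical.
    proc = subprocess.Popen(
        ["python", "my_mcp_server.py"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        text=True,
    )

    # List the registered tools.
    proc.stdin.write(json.dumps({"id": 1, "method": "tools/list"}) + "\n")
    proc.stdin.flush()
    print(proc.stdout.readline())  # {"id": 1, "result": {"tools": [...]}}

    # Invoke one tool; failures come back as JSON-RPC error objects
    # (-32601 unknown tool or method, -32603 execution failure, -32700 parse error).
    proc.stdin.write(json.dumps({
        "id": 2,
        "method": "tools/call",
        "params": {"name": "search", "arguments": {"query": "mcp"}},
    }) + "\n")
    proc.stdin.flush()
    print(proc.stdout.readline())  # {"id": 2, "result": {"content": [...]}}
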