kailash 0.6.3__py3-none-any.whl → 0.6.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. kailash/__init__.py +3 -3
  2. kailash/api/custom_nodes_secure.py +3 -3
  3. kailash/api/gateway.py +1 -1
  4. kailash/api/studio.py +1 -1
  5. kailash/api/workflow_api.py +2 -2
  6. kailash/core/resilience/bulkhead.py +475 -0
  7. kailash/core/resilience/circuit_breaker.py +92 -10
  8. kailash/core/resilience/health_monitor.py +578 -0
  9. kailash/edge/discovery.py +86 -0
  10. kailash/mcp_server/__init__.py +309 -33
  11. kailash/mcp_server/advanced_features.py +1022 -0
  12. kailash/mcp_server/ai_registry_server.py +27 -2
  13. kailash/mcp_server/auth.py +789 -0
  14. kailash/mcp_server/client.py +645 -378
  15. kailash/mcp_server/discovery.py +1593 -0
  16. kailash/mcp_server/errors.py +673 -0
  17. kailash/mcp_server/oauth.py +1727 -0
  18. kailash/mcp_server/protocol.py +1126 -0
  19. kailash/mcp_server/registry_integration.py +587 -0
  20. kailash/mcp_server/server.py +1228 -96
  21. kailash/mcp_server/transports.py +1169 -0
  22. kailash/mcp_server/utils/__init__.py +6 -1
  23. kailash/mcp_server/utils/cache.py +250 -7
  24. kailash/middleware/auth/auth_manager.py +3 -3
  25. kailash/middleware/communication/api_gateway.py +1 -1
  26. kailash/middleware/communication/realtime.py +1 -1
  27. kailash/middleware/mcp/enhanced_server.py +1 -1
  28. kailash/nodes/__init__.py +2 -0
  29. kailash/nodes/admin/audit_log.py +6 -6
  30. kailash/nodes/admin/permission_check.py +8 -8
  31. kailash/nodes/admin/role_management.py +32 -28
  32. kailash/nodes/admin/schema.sql +6 -1
  33. kailash/nodes/admin/schema_manager.py +13 -13
  34. kailash/nodes/admin/security_event.py +15 -15
  35. kailash/nodes/admin/tenant_isolation.py +3 -3
  36. kailash/nodes/admin/transaction_utils.py +3 -3
  37. kailash/nodes/admin/user_management.py +21 -21
  38. kailash/nodes/ai/a2a.py +11 -11
  39. kailash/nodes/ai/ai_providers.py +9 -12
  40. kailash/nodes/ai/embedding_generator.py +13 -14
  41. kailash/nodes/ai/intelligent_agent_orchestrator.py +19 -19
  42. kailash/nodes/ai/iterative_llm_agent.py +2 -2
  43. kailash/nodes/ai/llm_agent.py +210 -33
  44. kailash/nodes/ai/self_organizing.py +2 -2
  45. kailash/nodes/alerts/discord.py +4 -4
  46. kailash/nodes/api/graphql.py +6 -6
  47. kailash/nodes/api/http.py +10 -10
  48. kailash/nodes/api/rate_limiting.py +4 -4
  49. kailash/nodes/api/rest.py +15 -15
  50. kailash/nodes/auth/mfa.py +3 -3
  51. kailash/nodes/auth/risk_assessment.py +2 -2
  52. kailash/nodes/auth/session_management.py +5 -5
  53. kailash/nodes/auth/sso.py +143 -0
  54. kailash/nodes/base.py +8 -2
  55. kailash/nodes/base_async.py +16 -2
  56. kailash/nodes/base_with_acl.py +2 -2
  57. kailash/nodes/cache/__init__.py +9 -0
  58. kailash/nodes/cache/cache.py +1172 -0
  59. kailash/nodes/cache/cache_invalidation.py +874 -0
  60. kailash/nodes/cache/redis_pool_manager.py +595 -0
  61. kailash/nodes/code/async_python.py +2 -1
  62. kailash/nodes/code/python.py +194 -30
  63. kailash/nodes/compliance/data_retention.py +6 -6
  64. kailash/nodes/compliance/gdpr.py +5 -5
  65. kailash/nodes/data/__init__.py +10 -0
  66. kailash/nodes/data/async_sql.py +1956 -129
  67. kailash/nodes/data/optimistic_locking.py +906 -0
  68. kailash/nodes/data/readers.py +8 -8
  69. kailash/nodes/data/redis.py +378 -0
  70. kailash/nodes/data/sql.py +314 -3
  71. kailash/nodes/data/streaming.py +21 -0
  72. kailash/nodes/enterprise/__init__.py +8 -0
  73. kailash/nodes/enterprise/audit_logger.py +285 -0
  74. kailash/nodes/enterprise/batch_processor.py +22 -3
  75. kailash/nodes/enterprise/data_lineage.py +1 -1
  76. kailash/nodes/enterprise/mcp_executor.py +205 -0
  77. kailash/nodes/enterprise/service_discovery.py +150 -0
  78. kailash/nodes/enterprise/tenant_assignment.py +108 -0
  79. kailash/nodes/logic/async_operations.py +2 -2
  80. kailash/nodes/logic/convergence.py +1 -1
  81. kailash/nodes/logic/operations.py +1 -1
  82. kailash/nodes/monitoring/__init__.py +11 -1
  83. kailash/nodes/monitoring/health_check.py +456 -0
  84. kailash/nodes/monitoring/log_processor.py +817 -0
  85. kailash/nodes/monitoring/metrics_collector.py +627 -0
  86. kailash/nodes/monitoring/performance_benchmark.py +137 -11
  87. kailash/nodes/rag/advanced.py +7 -7
  88. kailash/nodes/rag/agentic.py +49 -2
  89. kailash/nodes/rag/conversational.py +3 -3
  90. kailash/nodes/rag/evaluation.py +3 -3
  91. kailash/nodes/rag/federated.py +3 -3
  92. kailash/nodes/rag/graph.py +3 -3
  93. kailash/nodes/rag/multimodal.py +3 -3
  94. kailash/nodes/rag/optimized.py +5 -5
  95. kailash/nodes/rag/privacy.py +3 -3
  96. kailash/nodes/rag/query_processing.py +6 -6
  97. kailash/nodes/rag/realtime.py +1 -1
  98. kailash/nodes/rag/registry.py +1 -1
  99. kailash/nodes/rag/router.py +1 -1
  100. kailash/nodes/rag/similarity.py +7 -7
  101. kailash/nodes/rag/strategies.py +4 -4
  102. kailash/nodes/security/abac_evaluator.py +6 -6
  103. kailash/nodes/security/behavior_analysis.py +5 -5
  104. kailash/nodes/security/credential_manager.py +1 -1
  105. kailash/nodes/security/rotating_credentials.py +11 -11
  106. kailash/nodes/security/threat_detection.py +8 -8
  107. kailash/nodes/testing/credential_testing.py +2 -2
  108. kailash/nodes/transform/processors.py +5 -5
  109. kailash/runtime/local.py +163 -9
  110. kailash/runtime/parameter_injection.py +425 -0
  111. kailash/runtime/parameter_injector.py +657 -0
  112. kailash/runtime/testing.py +2 -2
  113. kailash/testing/fixtures.py +2 -2
  114. kailash/workflow/builder.py +99 -14
  115. kailash/workflow/builder_improvements.py +207 -0
  116. kailash/workflow/input_handling.py +170 -0
  117. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/METADATA +22 -9
  118. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/RECORD +122 -95
  119. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/WHEEL +0 -0
  120. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/entry_points.txt +0 -0
  121. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/licenses/LICENSE +0 -0
  122. {kailash-0.6.3.dist-info → kailash-0.6.5.dist-info}/top_level.txt +0 -0
@@ -1,10 +1,21 @@
 """
-MCP Server Framework with production-ready capabilities.
+Enhanced MCP Server Framework with production-ready capabilities.
 
 This module provides both basic and enhanced MCP server implementations using
 the official FastMCP framework from Anthropic. Servers run as long-lived
 services that expose tools, resources, and prompts to MCP clients.
 
+Enhanced Features:
+- Multiple transport support (STDIO, SSE, HTTP)
+- Authentication and authorization
+- Rate limiting and circuit breaker patterns
+- Metrics collection and monitoring
+- Error handling with structured codes
+- Service discovery integration
+- Resource streaming
+- Connection pooling
+- Caching with TTL support
+
 Basic Usage:
     Abstract base class for custom servers:
 
@@ -25,16 +36,44 @@ Production Usage:
     ... def search(query: str) -> dict:
     ...     return {"results": f"Found data for {query}"}
     >>> server.run()
+
+Enhanced Production Usage:
+    Server with authentication and monitoring:
+
+    >>> from kailash.mcp_server.auth import APIKeyAuth
+    >>> auth = APIKeyAuth({"user1": "secret-key"})
+    >>> server = MCPServer(
+    ...     "my-server",
+    ...     auth_provider=auth,
+    ...     enable_metrics=True,
+    ...     enable_http_transport=True,
+    ...     rate_limit_config={"requests_per_minute": 100}
+    ... )
+    >>> server.run()
 """
 
 import asyncio
 import functools
 import logging
+import time
+import uuid
 from abc import ABC, abstractmethod
 from collections.abc import Callable
 from pathlib import Path
-from typing import Any, Dict, Optional, TypeVar, Union
-
+from typing import Any, Dict, List, Optional, TypeVar, Union
+
+from .auth import AuthManager, AuthProvider, PermissionManager, RateLimiter
+from .errors import (
+    AuthenticationError,
+    AuthorizationError,
+    ErrorAggregator,
+    MCPError,
+    MCPErrorCode,
+    RateLimitError,
+    ResourceError,
+    RetryableOperation,
+    ToolError,
+)
 from .utils import CacheManager, ConfigManager, MetricsCollector, format_response
 
 logger = logging.getLogger(__name__)
@@ -162,14 +201,50 @@ class MCPServerBase(ABC):
     def _init_mcp(self):
         """Initialize the FastMCP instance."""
         try:
-            from mcp.server import FastMCP
+            # Try independent FastMCP package first (when available)
+            from fastmcp import FastMCP
 
             self._mcp = FastMCP(self.name)
         except ImportError:
-            logger.error(
-                "FastMCP not available. Install with: pip install 'mcp[server]'"
-            )
-            raise
+            logger.warning("FastMCP not available, using fallback mode")
+            # Use same fallback as MCPServer
+            self._mcp = self._create_fallback_server()
+
+    def _create_fallback_server(self):
+        """Create a fallback server when FastMCP is not available."""
+
+        class FallbackMCPServer:
+            def __init__(self, name: str):
+                self.name = name
+                self._tools = {}
+                self._resources = {}
+                self._prompts = {}
+
+            def tool(self, *args, **kwargs):
+                def decorator(func):
+                    self._tools[func.__name__] = func
+                    return func
+
+                return decorator
+
+            def resource(self, uri):
+                def decorator(func):
+                    self._resources[uri] = func
+                    return func
+
+                return decorator
+
+            def prompt(self, name):
+                def decorator(func):
+                    self._prompts[name] = func
+                    return func
+
+                return decorator
+
+            def run(self, **kwargs):
+                raise NotImplementedError("FastMCP not available")
+
+        return FallbackMCPServer(self.name)
 
     def start(self):
         """Start the MCP server.
@@ -206,35 +281,64 @@
         # In a real implementation, we'd need to handle graceful shutdown
 
 
-class EnhancedMCPServer:
+class MCPServer:
     """
-    Production-ready MCP server (available as SimpleMCPServer).
+    Kailash MCP Server - Node-based Model Context Protocol server.
 
-    This is the main concrete MCP server implementation with all production
-    features available. Features can be enabled/disabled as needed.
+    This MCP server follows Kailash philosophy by integrating with the node
+    and workflow system. Tools can be implemented as nodes, and complex
+    MCP capabilities can be built using workflows.
 
-    Features available:
-    - Caching with TTL support (enable_cache=True)
-    - Metrics collection and monitoring (enable_metrics=True)
-    - Response formatting utilities (enable_formatting=True)
-    - Hierarchical configuration management
-    - Error handling and logging
+    Core Features:
+    - Node-based tool implementation using Kailash nodes
+    - Workflow-based complex operations
+    - Production-ready with authentication, caching, and monitoring
+    - Multiple transport support (STDIO, SSE, HTTP)
+    - Integration with Kailash runtime and infrastructure
 
-    Examples:
-        Basic usage (recommended):
+    Kailash Philosophy Integration:
+        Using nodes as MCP tools:
 
         >>> from kailash.mcp_server import MCPServer
+        >>> from kailash.nodes import PythonCodeNode
+        >>>
         >>> server = MCPServer("my-server")
-        >>> @server.tool()
-        ... def search(query: str) -> dict:
-        ...     return {"results": f"Found: {query}"}
+        >>>
+        >>> # Register a node as an MCP tool
+        >>> @server.node_tool(PythonCodeNode)
+        ... def calculate(a: int, b: int) -> int:
+        ...     return a + b
+        >>>
         >>> server.run()
 
-        With production features enabled:
+        Using workflows as MCP tools:
+        >>> from kailash.workflows import WorkflowBuilder
+        >>>
+        >>> # Create workflow for complex MCP operation
+        >>> workflow = WorkflowBuilder()
+        >>> workflow.add_node("csv_reader", "CSVReaderNode", {"file_path": "data.csv"})
+        >>> workflow.add_node("processor", "PythonCodeNode", {"code": "process_data"})
+        >>> workflow.add_connection("csv_reader", "processor", "data", "input_data")
+        >>>
+        >>> server.register_workflow_tool("process_csv", workflow)
+
+        Traditional usage (for compatibility):
        >>> server = MCPServer("my-server", enable_cache=True, enable_metrics=True)
        >>> @server.tool(cache_key="search", cache_ttl=600)
        ... def search(query: str) -> dict:
        ...     return {"results": f"Found: {query}"}
        >>> server.run()
+
+        With authentication and advanced features:
+        >>> from kailash.mcp_server.auth import APIKeyAuth
+        >>> auth = APIKeyAuth({"user1": "secret-key"})
+        >>> server = MCPServer(
+        ...     "my-server",
+        ...     auth_provider=auth,
+        ...     enable_http_transport=True,
+        ...     rate_limit_config={"requests_per_minute": 100},
+        ...     circuit_breaker_config={"failure_threshold": 5}
+        ... )
+        >>> server.run()
     """
 
     def __init__(
@@ -243,8 +347,23 @@ class EnhancedMCPServer:
         config_file: Optional[Union[str, Path]] = None,
         enable_cache: bool = True,
         cache_ttl: int = 300,
+        cache_backend: str = "memory",  # "memory" or "redis"
+        cache_config: Optional[Dict[str, Any]] = None,
         enable_metrics: bool = True,
         enable_formatting: bool = True,
+        enable_monitoring: bool = False,  # Health checks, alerts, observability
+        # Enhanced features (optional for backward compatibility)
+        auth_provider: Optional[AuthProvider] = None,
+        enable_http_transport: bool = False,
+        enable_sse_transport: bool = False,
+        rate_limit_config: Optional[Dict[str, Any]] = None,
+        circuit_breaker_config: Optional[Dict[str, Any]] = None,
+        enable_discovery: bool = False,
+        connection_pool_config: Optional[Dict[str, Any]] = None,
+        error_aggregation: bool = True,
+        transport_timeout: float = 30.0,
+        max_request_size: int = 10_000_000,  # 10MB
+        enable_streaming: bool = False,
     ):
         """
         Initialize enhanced MCP server.
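
A minimal construction sketch based only on the parameters added above (the Redis settings follow the cache_config format documented in the next hunk; the URL and key values are placeholders, not taken from the package):

    from kailash.mcp_server import MCPServer
    from kailash.mcp_server.auth import APIKeyAuth

    # Hypothetical values for illustration only.
    server = MCPServer(
        "my-server",
        cache_backend="redis",
        cache_config={"redis_url": "redis://localhost:6379/0", "prefix": "mcp:"},
        auth_provider=APIKeyAuth({"user1": "secret-key"}),
        rate_limit_config={"requests_per_minute": 100},
        circuit_breaker_config={"failure_threshold": 5},
        transport_timeout=30.0,
    )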
@@ -254,22 +373,56 @@
             config_file: Optional configuration file path
             enable_cache: Whether to enable caching (default: True)
             cache_ttl: Default cache TTL in seconds (default: 300)
+            cache_backend: Cache backend ("memory" or "redis")
+            cache_config: Cache configuration (for Redis: {"redis_url": "redis://...", "prefix": "mcp:"})
             enable_metrics: Whether to enable metrics collection (default: True)
             enable_formatting: Whether to enable response formatting (default: True)
+            auth_provider: Optional authentication provider
+            enable_http_transport: Enable HTTP transport support
+            enable_sse_transport: Enable SSE transport support
+            rate_limit_config: Rate limiting configuration
+            circuit_breaker_config: Circuit breaker configuration
+            enable_discovery: Enable service discovery
+            connection_pool_config: Connection pooling configuration
+            error_aggregation: Enable error aggregation
+            transport_timeout: Transport timeout in seconds
+            max_request_size: Maximum request size in bytes
+            enable_streaming: Enable streaming support
         """
         self.name = name
 
+        # Enhanced features
+        self.auth_provider = auth_provider
+        self.enable_http_transport = enable_http_transport
+        self.enable_sse_transport = enable_sse_transport
+        self.enable_discovery = enable_discovery
+        self.enable_streaming = enable_streaming
+        self.enable_monitoring = enable_monitoring
+        self.transport_timeout = transport_timeout
+        self.max_request_size = max_request_size
+
         # Initialize configuration
         self.config = ConfigManager(config_file)
 
-        # Set default configuration values
+        # Set default configuration values including enhanced features
        self.config.update(
            {
-                "server": {"name": name, "version": "1.0.0", "transport": "stdio"},
+                "server": {
+                    "name": name,
+                    "version": "1.0.0",
+                    "transport": "stdio",
+                    "enable_http": enable_http_transport,
+                    "enable_sse": enable_sse_transport,
+                    "timeout": transport_timeout,
+                    "max_request_size": max_request_size,
+                    "enable_streaming": enable_streaming,
+                },
                "cache": {
                    "enabled": enable_cache,
                    "default_ttl": cache_ttl,
                    "max_size": 128,
+                    "backend": cache_backend,
+                    "config": cache_config or {},
                },
                "metrics": {
                    "enabled": enable_metrics,
@@ -280,13 +433,40 @@
                    "enabled": enable_formatting,
                    "default_format": "markdown",
                },
+                "monitoring": {
+                    "enabled": enable_monitoring,
+                    "health_checks": enable_monitoring,
+                    "observability": enable_monitoring,
+                },
+                "auth": {
+                    "enabled": auth_provider is not None,
+                    "provider_type": (
+                        type(auth_provider).__name__ if auth_provider else None
+                    ),
+                },
+                "rate_limiting": rate_limit_config or {},
+                "circuit_breaker": circuit_breaker_config or {},
+                "discovery": {"enabled": enable_discovery},
+                "connection_pool": connection_pool_config or {},
            }
        )
 
+        # Initialize authentication manager
+        if auth_provider:
+            self.auth_manager = AuthManager(
+                provider=auth_provider,
+                permission_manager=PermissionManager(),
+                rate_limiter=RateLimiter(**(rate_limit_config or {})),
+            )
+        else:
+            self.auth_manager = None
+
         # Initialize components
         self.cache = CacheManager(
             enabled=self.config.get("cache.enabled", enable_cache),
             default_ttl=self.config.get("cache.default_ttl", cache_ttl),
+            backend=self.config.get("cache.backend", cache_backend),
+            config=self.config.get("cache.config", cache_config or {}),
         )
 
         self.metrics = MetricsCollector(
@@ -295,12 +475,30 @@
             collect_usage=self.config.get("metrics.collect_usage", True),
         )
 
+        # Error aggregation
+        if error_aggregation:
+            self.error_aggregator = ErrorAggregator()
+        else:
+            self.error_aggregator = None
+
+        # Circuit breaker for tool calls
+        if circuit_breaker_config:
+            from .errors import CircuitBreakerRetry
+
+            self.circuit_breaker = CircuitBreakerRetry(**circuit_breaker_config)
+        else:
+            self.circuit_breaker = None
+
         # FastMCP server instance (initialized lazily)
         self._mcp = None
         self._running = False
+        self._active_sessions: Dict[str, Dict[str, Any]] = {}
+        self._connection_pools: Dict[str, List[Any]] = {}
 
         # Tool registry for management
         self._tool_registry: Dict[str, Dict[str, Any]] = {}
+        self._resource_registry: Dict[str, Dict[str, Any]] = {}
+        self._prompt_registry: Dict[str, Dict[str, Any]] = {}
 
     def _init_mcp(self):
         """Initialize FastMCP server."""
@@ -308,41 +506,129 @@
             return
 
         try:
-            # Now we can safely import from external mcp.server (no namespace collision)
-            from mcp.server import FastMCP
+            # Try independent FastMCP package first (when available)
+            from fastmcp import FastMCP
 
            self._mcp = FastMCP(self.name)
            logger.info(f"Initialized FastMCP server: {self.name}")
-        except ImportError as e:
-            logger.error(
-                f"FastMCP import failed with: {e}. Details: {type(e).__name__}"
-            )
-            logger.error(
-                "FastMCP not available. Install with: pip install 'mcp[server]'"
-            )
-            raise ImportError(
-                "FastMCP not available. Install with: pip install 'mcp[server]'"
-            ) from e
+        except ImportError as e1:
+            logger.warning(f"Independent FastMCP not available: {e1}")
+            try:
+                # Fallback to official MCP FastMCP (when fixed)
+                from mcp.server import FastMCP
+
+                self._mcp = FastMCP(self.name)
+                logger.info(f"Initialized official FastMCP server: {self.name}")
+            except ImportError as e2:
+                logger.warning(f"Official FastMCP not available: {e2}")
+                # Final fallback: Create a minimal FastMCP-compatible wrapper
+                logger.info(f"Using low-level MCP Server fallback for: {self.name}")
+                self._mcp = self._create_fallback_server()
+
+    def _create_fallback_server(self):
+        """Create a fallback server when FastMCP is not available."""
+        logger.info("Creating fallback server implementation")
+
+        class FallbackMCPServer:
+            """Minimal FastMCP-compatible server for when FastMCP is unavailable."""
+
+            def __init__(self, name: str):
+                self.name = name
+                self._tools = {}
+                self._resources = {}
+                self._prompts = {}
+                logger.info(f"Fallback MCP server '{name}' initialized")
+
+            def tool(self, *args, **kwargs):
+                """Tool decorator that stores tool registration."""
+
+                def decorator(func):
+                    tool_name = func.__name__
+                    self._tools[tool_name] = func
+                    logger.debug(f"Registered fallback tool: {tool_name}")
+                    return func
+
+                return decorator
+
+            def resource(self, uri):
+                """Resource decorator that stores resource registration."""
+
+                def decorator(func):
+                    self._resources[uri] = func
+                    logger.debug(f"Registered fallback resource: {uri}")
+                    return func
+
+                return decorator
+
+            def prompt(self, name):
+                """Prompt decorator that stores prompt registration."""
+
+                def decorator(func):
+                    self._prompts[name] = func
+                    logger.debug(f"Registered fallback prompt: {name}")
+                    return func
+
+                return decorator
+
+            def run(self, **kwargs):
+                """Placeholder run method."""
+                logger.warning(
+                    f"Fallback server '{self.name}' run() called - FastMCP features limited"
+                )
+                logger.info(
+                    f"Registered: {len(self._tools)} tools, {len(self._resources)} resources, {len(self._prompts)} prompts"
+                )
+                # In a real implementation, we would set up low-level MCP protocol here
+                raise NotImplementedError(
+                    "Full MCP protocol not implemented in fallback mode. "
+                    "Install 'fastmcp>=2.10.0' or wait for official MCP package fix."
+                )
+
+        return FallbackMCPServer(self.name)
 
    def tool(
        self,
        cache_key: Optional[str] = None,
        cache_ttl: Optional[int] = None,
        format_response: Optional[str] = None,
+        # Enhanced features
+        required_permission: Optional[str] = None,
+        required_permissions: Optional[
+            List[str]
+        ] = None,  # Added for backward compatibility
+        rate_limit: Optional[Dict[str, Any]] = None,
+        enable_circuit_breaker: bool = True,
+        timeout: Optional[float] = None,
+        retryable: bool = True,
+        stream_response: bool = False,
    ):
        """
-        Enhanced tool decorator with optional caching and metrics.
+        Enhanced tool decorator with authentication, caching, metrics, and error handling.
 
        Args:
            cache_key: Optional cache key for caching results
            cache_ttl: Optional TTL override for this tool
            format_response: Optional response format ("json", "markdown", "table", etc.)
+            required_permission: Single required permission for tool access
+            required_permissions: List of required permissions (alternative to required_permission)
+            rate_limit: Tool-specific rate limiting configuration
+            enable_circuit_breaker: Enable circuit breaker for this tool
+            timeout: Tool execution timeout in seconds
+            retryable: Whether tool failures are retryable
+            stream_response: Enable streaming response for large results
 
        Returns:
            Decorated function with enhanced capabilities
 
        Example:
-            @server.tool(cache_key="weather", cache_ttl=600, format_response="markdown")
+            @server.tool(
+                cache_key="weather",
+                cache_ttl=600,
+                format_response="markdown",
+                required_permission="weather.read",
+                rate_limit={"requests_per_minute": 10},
+                timeout=30.0
+            )
            async def get_weather(city: str) -> dict:
                # Expensive API call - will be cached for 10 minutes
                return await fetch_weather_data(city)
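
A hedged sketch of the backward-compatible plural form accepted by the same decorator; as the normalization logic in the next hunk shows, a single-element required_permissions list behaves like the singular parameter (the permission name and limits here are illustrative, and fetch_weather_data is a placeholder):

    @server.tool(
        cache_key="weather",
        cache_ttl=600,
        required_permissions=["weather.read"],  # treated like required_permission="weather.read"
        rate_limit={"requests_per_minute": 10},
        timeout=30.0,
    )
    async def get_weather(city: str) -> dict:
        return await fetch_weather_data(city)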
@@ -355,15 +641,43 @@
            # Get function name for registration
            tool_name = func.__name__
 
+            # Normalize permissions - support both singular and plural
+            normalized_permission = None
+            if required_permissions is not None and required_permission is not None:
+                raise ValueError(
+                    "Cannot specify both required_permission and required_permissions"
+                )
+            elif required_permissions is not None:
+                if len(required_permissions) == 1:
+                    normalized_permission = required_permissions[0]
+                elif len(required_permissions) > 1:
+                    # For now, take the first permission. Future enhancement could support multiple.
+                    normalized_permission = required_permissions[0]
+                    logger.warning(
+                        f"Tool {tool_name}: Multiple permissions specified, using first: {normalized_permission}"
+                    )
+            elif required_permission is not None:
+                normalized_permission = required_permission
+
            # Create enhanced wrapper
            enhanced_func = self._create_enhanced_tool(
-                func, tool_name, cache_key, cache_ttl, format_response
+                func,
+                tool_name,
+                cache_key,
+                cache_ttl,
+                format_response,
+                normalized_permission,
+                rate_limit,
+                enable_circuit_breaker,
+                timeout,
+                retryable,
+                stream_response,
            )
 
            # Register with FastMCP
            mcp_tool = self._mcp.tool()(enhanced_func)
 
-            # Track in registry
+            # Track in registry with enhanced metadata
            self._tool_registry[tool_name] = {
                "function": mcp_tool,
                "original_function": func,
@@ -371,10 +685,22 @@
                "cache_key": cache_key,
                "cache_ttl": cache_ttl,
                "format_response": format_response,
+                "required_permission": normalized_permission,
+                "rate_limit": rate_limit,
+                "enable_circuit_breaker": enable_circuit_breaker,
+                "timeout": timeout,
+                "retryable": retryable,
+                "stream_response": stream_response,
+                "call_count": 0,
+                "error_count": 0,
+                "last_called": None,
            }
 
            logger.debug(
-                f"Registered tool: {tool_name} (cached: {cache_key is not None})"
+                f"Registered enhanced tool: {tool_name} "
+                f"(cached: {cache_key is not None}, "
+                f"auth: {required_permission is not None}, "
+                f"rate_limited: {rate_limit is not None})"
            )
            return mcp_tool
 
@@ -387,19 +713,75 @@
        cache_key: Optional[str],
        cache_ttl: Optional[int],
        response_format: Optional[str],
+        required_permission: Optional[str],
+        rate_limit: Optional[Dict[str, Any]],
+        enable_circuit_breaker: bool,
+        timeout: Optional[float],
+        retryable: bool,
+        stream_response: bool,
    ) -> F:
-        """Create enhanced tool function with caching, metrics, and formatting."""
+        """Create enhanced tool function with authentication, caching, metrics, error handling, and more."""
 
        @functools.wraps(func)
        def sync_wrapper(*args, **kwargs):
-            # Apply metrics tracking
-            start_time = None
-            if self.metrics.enabled:
-                import time
-
-                start_time = time.time()
+            # Generate session ID for tracking
+            session_id = str(uuid.uuid4())
+            start_time = time.time() if self.metrics.enabled else None
 
            try:
+                # Authentication check
+                if self.auth_manager and required_permission:
+                    # Extract credentials from kwargs or context
+                    credentials = self._extract_credentials_from_context(kwargs)
+                    try:
+                        user_info = self.auth_manager.authenticate_and_authorize(
+                            credentials, required_permission
+                        )
+                        # Add user info to session
+                        self._active_sessions[session_id] = {
+                            "user": user_info,
+                            "tool": tool_name,
+                            "start_time": start_time,
+                            "permission": required_permission,
+                        }
+                    except (AuthenticationError, AuthorizationError) as e:
+                        if self.error_aggregator:
+                            self.error_aggregator.record_error(e)
+                        raise ToolError(
+                            f"Access denied for {tool_name}: {str(e)}",
+                            tool_name=tool_name,
+                        )
+
+                # Rate limiting check
+                if rate_limit and self.auth_manager:
+                    user_id = (
+                        self._active_sessions.get(session_id, {})
+                        .get("user", {})
+                        .get("id", "anonymous")
+                    )
+                    try:
+                        self.auth_manager.rate_limiter.check_rate_limit(
+                            user_id, tool_name, **rate_limit
+                        )
+                    except RateLimitError as e:
+                        if self.error_aggregator:
+                            self.error_aggregator.record_error(e)
+                        raise
+
+                # Circuit breaker check
+                if enable_circuit_breaker and self.circuit_breaker:
+                    if not self.circuit_breaker.should_retry(
+                        MCPError("Circuit breaker check"), 1
+                    ):
+                        error = MCPError(
+                            f"Circuit breaker open for {tool_name}",
+                            error_code=MCPErrorCode.CIRCUIT_BREAKER_OPEN,
+                            retryable=True,
+                        )
+                        if self.error_aggregator:
+                            self.error_aggregator.record_error(error)
+                        raise error
+
                # Try cache first if enabled
                if cache_key and self.cache.enabled:
                    cache = self.cache.get_cache(cache_key, ttl=cache_ttl)
@@ -407,20 +789,77 @@
                        tool_name, args, kwargs
                    )
 
-                    result = cache.get(cache_lookup_key)
+                    # For sync functions with Redis, we need to handle async operations
+                    if cache.is_redis:
+                        # Try to run async cache operations in sync context
+                        try:
+                            # Check if we're already in an async context
+                            try:
+                                asyncio.get_running_loop()
+                                # We're in an async context, but this is a sync function
+                                # Fall back to memory cache behavior (no caching for now)
+                                result = None
+                            except RuntimeError:
+                                # Not in async context, we can use asyncio.run
+                                result = asyncio.run(cache.aget(cache_lookup_key))
+                        except Exception as e:
+                            logger.debug(f"Redis cache error in sync context: {e}")
+                            result = None
+                    else:
+                        result = cache.get(cache_lookup_key)
+
                    if result is not None:
                        logger.debug(f"Cache hit for {tool_name}")
                        if self.metrics.enabled:
                            latency = time.time() - start_time
                            self.metrics.track_tool_call(tool_name, latency, True)
-                        return self._format_response(result, response_format)
 
-                # Execute function
-                result = func(*args, **kwargs)
+                        # Update registry stats
+                        self._tool_registry[tool_name]["call_count"] += 1
+                        self._tool_registry[tool_name]["last_called"] = time.time()
+
+                        return self._format_response(
+                            result, response_format, stream_response
+                        )
+
+                # Execute function with timeout
+                if timeout:
+                    import signal
+
+                    def timeout_handler(signum, frame):
+                        raise TimeoutError(
+                            f"Tool {tool_name} timed out after {timeout}s"
+                        )
+
+                    old_handler = signal.signal(signal.SIGALRM, timeout_handler)
+                    signal.alarm(int(timeout))
+
+                    try:
+                        result = func(*args, **kwargs)
+                    finally:
+                        signal.alarm(0)
+                        signal.signal(signal.SIGALRM, old_handler)
+                else:
+                    result = func(*args, **kwargs)
 
                # Cache result if enabled
                if cache_key and self.cache.enabled:
-                    cache.set(cache_lookup_key, result)
+                    # For sync functions with Redis, handle async operations
+                    if cache.is_redis:
+                        try:
+                            # Check if we're already in an async context
+                            try:
+                                asyncio.get_running_loop()
+                                # We're in an async context, but this is a sync function
+                                # Fall back to memory cache behavior (no caching for now)
+                                pass
+                            except RuntimeError:
+                                # Not in async context, we can use asyncio.run
+                                asyncio.run(cache.aset(cache_lookup_key, result))
+                        except Exception as e:
+                            logger.debug(f"Redis cache set error in sync context: {e}")
+                    else:
+                        cache.set(cache_lookup_key, result)
                    logger.debug(f"Cached result for {tool_name}")
 
                # Track success metrics
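
The sync wrapper above has to bridge into the async Redis cache API; the pattern it relies on can be shown in isolation (a sketch, where aget stands in for any awaitable cache call):

    import asyncio

    def get_from_async_cache(cache, key):
        try:
            asyncio.get_running_loop()
            # Already inside an event loop: a sync function cannot await here,
            # so treat it as a cache miss rather than nest or block the loop.
            return None
        except RuntimeError:
            # No running loop: safe to drive the async call to completion.
            return asyncio.run(cache.aget(key))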
@@ -428,9 +867,36 @@
                    latency = time.time() - start_time
                    self.metrics.track_tool_call(tool_name, latency, True)
 
-                return self._format_response(result, response_format)
+                # Update circuit breaker on success
+                if enable_circuit_breaker and self.circuit_breaker:
+                    self.circuit_breaker.on_success()
+
+                # Update registry stats
+                self._tool_registry[tool_name]["call_count"] += 1
+                self._tool_registry[tool_name]["last_called"] = time.time()
+
+                return self._format_response(result, response_format, stream_response)
 
            except Exception as e:
+                # Convert to MCP error if needed
+                if not isinstance(e, MCPError):
+                    mcp_error = ToolError(
+                        f"Tool execution failed: {str(e)}",
+                        tool_name=tool_name,
+                        retryable=retryable,
+                        cause=e,
+                    )
+                else:
+                    mcp_error = e
+
+                # Record error
+                if self.error_aggregator:
+                    self.error_aggregator.record_error(mcp_error)
+
+                # Update circuit breaker on failure
+                if enable_circuit_breaker and self.circuit_breaker:
+                    self.circuit_breaker.on_failure(mcp_error)
+
                # Track error metrics
                if self.metrics.enabled and start_time:
                    latency = time.time() - start_time
@@ -438,50 +904,188 @@
                        tool_name, latency, False, type(e).__name__
                    )
 
-                logger.error(f"Error in tool {tool_name}: {e}")
-                raise
+                # Update registry stats
+                self._tool_registry[tool_name]["error_count"] += 1
+                self._tool_registry[tool_name]["last_called"] = time.time()
+
+                logger.error(f"Error in tool {tool_name}: {mcp_error}")
+                raise mcp_error
+
+            finally:
+                # Clean up session
+                if session_id in self._active_sessions:
+                    del self._active_sessions[session_id]
 
        @functools.wraps(func)
        async def async_wrapper(*args, **kwargs):
-            # Apply metrics tracking
-            start_time = None
-            if self.metrics.enabled:
-                import time
-
-                start_time = time.time()
+            # Generate session ID for tracking
+            session_id = str(uuid.uuid4())
+            start_time = time.time() if self.metrics.enabled else None
 
            try:
-                # Try cache first if enabled
+                # Authentication check
+                if self.auth_manager and required_permission:
+                    # Extract credentials from kwargs or context
+                    credentials = self._extract_credentials_from_context(kwargs)
+
+                    # Allow bypassing auth for direct calls when no credentials provided
+                    # This enables testing and development scenarios
+                    if not credentials and not any(
+                        k.startswith("mcp_") for k in kwargs.keys()
+                    ):
+                        logger.debug(
+                            f"Tool {tool_name}: No credentials provided, allowing direct call (development/testing)"
+                        )
+                        user_info = None
+                    else:
+                        try:
+                            user_info = self.auth_manager.authenticate_and_authorize(
+                                credentials, required_permission
+                            )
+                            # Add user info to session
+                            self._active_sessions[session_id] = {
+                                "user": user_info,
+                                "tool": tool_name,
+                                "start_time": start_time,
+                                "permission": required_permission,
+                            }
+                        except (AuthenticationError, AuthorizationError) as e:
+                            if self.error_aggregator:
+                                self.error_aggregator.record_error(e)
+                            raise ToolError(
+                                f"Access denied for {tool_name}: {str(e)}",
+                                tool_name=tool_name,
+                            )
+
+                # Rate limiting check
+                if rate_limit and self.auth_manager:
+                    user_id = (
+                        self._active_sessions.get(session_id, {})
+                        .get("user", {})
+                        .get("id", "anonymous")
+                    )
+                    try:
+                        self.auth_manager.rate_limiter.check_rate_limit(
+                            user_id, tool_name, **rate_limit
+                        )
+                    except RateLimitError as e:
+                        if self.error_aggregator:
+                            self.error_aggregator.record_error(e)
+                        raise
+
+                # Circuit breaker check
+                if enable_circuit_breaker and self.circuit_breaker:
+                    if not self.circuit_breaker.should_retry(
+                        MCPError("Circuit breaker check"), 1
+                    ):
+                        error = MCPError(
+                            f"Circuit breaker open for {tool_name}",
+                            error_code=MCPErrorCode.CIRCUIT_BREAKER_OPEN,
+                            retryable=True,
+                        )
+                        if self.error_aggregator:
+                            self.error_aggregator.record_error(error)
+                        raise error
+
+                # Execute with caching and stampede prevention if enabled
                if cache_key and self.cache.enabled:
                    cache = self.cache.get_cache(cache_key, ttl=cache_ttl)
                    cache_lookup_key = self.cache._create_cache_key(
                        tool_name, args, kwargs
                    )
 
-                    result = cache.get(cache_lookup_key)
-                    if result is not None:
-                        logger.debug(f"Cache hit for {tool_name}")
-                        if self.metrics.enabled:
-                            latency = time.time() - start_time
-                            self.metrics.track_tool_call(tool_name, latency, True)
-                        return self._format_response(result, response_format)
-
-                # Execute function
-                result = await func(*args, **kwargs)
-
-                # Cache result if enabled
-                if cache_key and self.cache.enabled:
-                    cache.set(cache_lookup_key, result)
-                    logger.debug(f"Cached result for {tool_name}")
+                    # Define the compute function for cache-or-compute
+                    async def compute_result():
+                        # Filter out auth credentials from kwargs before calling the function
+                        clean_kwargs = {
+                            k: v
+                            for k, v in kwargs.items()
+                            if k
+                            not in [
+                                "api_key",
+                                "token",
+                                "username",
+                                "password",
+                                "jwt",
+                                "authorization",
+                                "mcp_auth",
+                            ]
+                        }
+
+                        # Execute function with timeout
+                        if timeout:
+                            return await asyncio.wait_for(
+                                func(*args, **clean_kwargs), timeout=timeout
+                            )
+                        else:
+                            return await func(*args, **clean_kwargs)
+
+                    # Use cache-or-compute with stampede prevention
+                    result = await cache.get_or_compute(
+                        cache_lookup_key, compute_result, cache_ttl
+                    )
+                    logger.debug(f"Got result for {tool_name} (cached or computed)")
+                else:
+                    # No caching - execute directly
+                    # Filter out auth credentials from kwargs before calling the function
+                    clean_kwargs = {
+                        k: v
+                        for k, v in kwargs.items()
+                        if k
+                        not in [
+                            "api_key",
+                            "token",
+                            "username",
+                            "password",
+                            "jwt",
+                            "authorization",
+                            "mcp_auth",
+                        ]
+                    }
+
+                    # Execute function with timeout
+                    if timeout:
+                        result = await asyncio.wait_for(
+                            func(*args, **clean_kwargs), timeout=timeout
+                        )
+                    else:
+                        result = await func(*args, **clean_kwargs)
 
                # Track success metrics
                if self.metrics.enabled:
                    latency = time.time() - start_time
                    self.metrics.track_tool_call(tool_name, latency, True)
 
-                return self._format_response(result, response_format)
+                # Update circuit breaker on success
+                if enable_circuit_breaker and self.circuit_breaker:
+                    self.circuit_breaker.on_success()
+
+                # Update registry stats
+                self._tool_registry[tool_name]["call_count"] += 1
+                self._tool_registry[tool_name]["last_called"] = time.time()
+
+                return self._format_response(result, response_format, stream_response)
 
            except Exception as e:
+                # Convert to MCP error if needed
+                if not isinstance(e, MCPError):
+                    mcp_error = ToolError(
+                        f"Tool execution failed: {str(e)}",
+                        tool_name=tool_name,
+                        retryable=retryable,
+                        cause=e,
+                    )
+                else:
+                    mcp_error = e
+
+                # Record error
+                if self.error_aggregator:
+                    self.error_aggregator.record_error(mcp_error)
+
+                # Update circuit breaker on failure
+                if enable_circuit_breaker and self.circuit_breaker:
+                    self.circuit_breaker.on_failure(mcp_error)
+
                # Track error metrics
                if self.metrics.enabled and start_time:
                    latency = time.time() - start_time
@@ -489,8 +1093,17 @@
                        tool_name, latency, False, type(e).__name__
                    )
 
-                logger.error(f"Error in tool {tool_name}: {e}")
-                raise
+                # Update registry stats
+                self._tool_registry[tool_name]["error_count"] += 1
+                self._tool_registry[tool_name]["last_called"] = time.time()
+
+                logger.error(f"Error in tool {tool_name}: {mcp_error}")
+                raise mcp_error
+
+            finally:
+                # Clean up session
+                if session_id in self._active_sessions:
+                    del self._active_sessions[session_id]
 
        # Return appropriate wrapper based on function type
        if asyncio.iscoroutinefunction(func):
@@ -498,17 +1111,86 @@
        else:
            return sync_wrapper
 
-    def _format_response(self, result: Any, response_format: Optional[str]) -> Any:
-        """Format response if formatting is enabled."""
+    def _format_response(
+        self, result: Any, response_format: Optional[str], stream_response: bool = False
+    ) -> Any:
+        """Format response if formatting is enabled, with optional streaming support."""
        if not self.config.get("formatting.enabled", True) or not response_format:
+            if (
+                stream_response
+                and isinstance(result, (list, dict))
+                and len(str(result)) > 1000
+            ):
+                # For large results, consider streaming (simplified implementation)
+                return {
+                    "streaming": True,
+                    "data": result,
+                    "chunks": self._chunk_large_response(result),
+                }
            return result
 
        try:
-            return format_response(result, response_format)
+            formatted = format_response(result, response_format)
+            if stream_response and isinstance(formatted, str) and len(formatted) > 1000:
+                return {
+                    "streaming": True,
+                    "data": formatted,
+                    "chunks": self._chunk_large_response(formatted),
+                }
+            return formatted
        except Exception as e:
            logger.warning(f"Failed to format response: {e}")
            return result
 
+    def _chunk_large_response(self, data: Any, chunk_size: int = 1000) -> List[str]:
+        """Chunk large responses for streaming."""
+        if isinstance(data, str):
+            return [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)]
+        elif isinstance(data, (list, dict)):
+            data_str = str(data)
+            return [
+                data_str[i : i + chunk_size]
+                for i in range(0, len(data_str), chunk_size)
+            ]
+        else:
+            return [str(data)]
+
+    def _extract_credentials_from_context(
+        self, kwargs: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """Extract credentials from function context or kwargs."""
+        # Look for common credential patterns in kwargs
+        credentials = {}
+
+        # Check for MCP-style authentication headers
+        if "mcp_auth" in kwargs:
+            credentials.update(kwargs["mcp_auth"])
+
+        # Check for common auth patterns
+        auth_fields = ["api_key", "token", "username", "password", "jwt"]
+        for field in auth_fields:
+            if field in kwargs:
+                credentials[field] = kwargs[field]
+
+        # Check for Authorization header pattern
+        if "authorization" in kwargs:
+            auth_header = kwargs["authorization"]
+            if auth_header.startswith("Bearer "):
+                credentials["token"] = auth_header[7:]
+            elif auth_header.startswith("Basic "):
+                import base64
+
+                try:
+                    decoded = base64.b64decode(auth_header[6:]).decode()
+                    if ":" in decoded:
+                        username, password = decoded.split(":", 1)
+                        credentials["username"] = username
+                        credentials["password"] = password
+                except Exception:
+                    pass
+
+        return credentials
+
    def resource(self, uri: str):
        """
        Add resource with metrics tracking.
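
For the Basic branch of the credential extraction above, the decoding is plain base64 of "username:password"; a quick standalone illustration (the user and key values are made up):

    import base64

    header = "Basic " + base64.b64encode(b"user1:secret-key").decode()
    decoded = base64.b64decode(header[6:]).decode()   # "user1:secret-key"
    username, password = decoded.split(":", 1)
    credentials = {"username": username, "password": password}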
@@ -579,8 +1261,24 @@
                "name": self.name,
                "running": self._running,
                "config": self.config.to_dict(),
+                "active_sessions": len(self._active_sessions),
+                "transport": {
+                    "http_enabled": self.enable_http_transport,
+                    "sse_enabled": self.enable_sse_transport,
+                    "streaming_enabled": self.enable_streaming,
+                    "timeout": self.transport_timeout,
+                    "max_request_size": self.max_request_size,
+                },
+                "features": {
+                    "auth_enabled": self.auth_manager is not None,
+                    "circuit_breaker_enabled": self.circuit_breaker is not None,
+                    "error_aggregation_enabled": self.error_aggregator is not None,
+                    "discovery_enabled": self.enable_discovery,
+                },
            },
            "tools": self.get_tool_stats(),
+            "resources": self.get_resource_stats(),
+            "prompts": self.get_prompt_stats(),
        }
 
        if self.metrics.enabled:
@@ -589,8 +1287,127 @@
        if self.cache.enabled:
            stats["cache"] = self.cache.stats()
 
+        if self.error_aggregator:
+            stats["errors"] = self.error_aggregator.get_error_stats(
+                time_window=3600
+            )  # Last hour
+
+        if self.circuit_breaker:
+            stats["circuit_breaker"] = {
+                "state": self.circuit_breaker.state,
+                "failure_count": self.circuit_breaker.failure_count,
+                "success_count": self.circuit_breaker.success_count,
+            }
+
        return stats
 
+    def get_resource_stats(self) -> Dict[str, Any]:
+        """Get resource statistics."""
+        return {
+            "registered_resources": len(self._resource_registry),
+            "resources": {
+                uri: {
+                    "call_count": info.get("call_count", 0),
+                    "error_count": info.get("error_count", 0),
+                    "last_accessed": info.get("last_accessed"),
+                }
+                for uri, info in self._resource_registry.items()
+            },
+        }
+
+    def get_prompt_stats(self) -> Dict[str, Any]:
+        """Get prompt statistics."""
+        return {
+            "registered_prompts": len(self._prompt_registry),
+            "prompts": {
+                name: {
+                    "call_count": info.get("call_count", 0),
+                    "error_count": info.get("error_count", 0),
+                    "last_used": info.get("last_used"),
+                }
+                for name, info in self._prompt_registry.items()
+            },
+        }
+
+    def get_active_sessions(self) -> Dict[str, Dict[str, Any]]:
+        """Get information about active sessions."""
+        return {
+            session_id: {
+                "user": session_info.get("user", {}),
+                "tool": session_info.get("tool"),
+                "permission": session_info.get("permission"),
+                "duration": time.time() - session_info.get("start_time", time.time()),
+            }
+            for session_id, session_info in self._active_sessions.items()
+        }
+
+    def get_error_trends(
+        self, time_window: Optional[float] = None
+    ) -> List[Dict[str, Any]]:
+        """Get error trends over time."""
+        if not self.error_aggregator:
+            return []
+        return self.error_aggregator.get_error_trends()
+
+    def health_check(self) -> Dict[str, Any]:
+        """Perform comprehensive health check."""
+        health_status = {
+            "status": "healthy",
+            "timestamp": time.time(),
+            "server": {
+                "name": self.name,
+                "running": self._running,
+                "uptime": time.time()
+                - self.config.get("server.start_time", time.time()),
+            },
+            "components": {
+                "mcp": self._mcp is not None,
+                "cache": self.cache.enabled if self.cache else False,
+                "metrics": self.metrics.enabled if self.metrics else False,
+                "auth": self.auth_manager is not None,
+                "circuit_breaker": self.circuit_breaker is not None,
+            },
+            "resources": {
+                "active_sessions": len(self._active_sessions),
+                "tools_registered": len(self._tool_registry),
+                "resources_registered": len(self._resource_registry),
+                "prompts_registered": len(self._prompt_registry),
+            },
+        }
+
+        # Check for issues
+        issues = []
+
+        # Check error rates
+        if self.error_aggregator:
+            error_stats = self.error_aggregator.get_error_stats(
+                time_window=300
+            )  # Last 5 minutes
+            if error_stats.get("error_rate", 0) > 10:  # More than 10 errors per second
+                issues.append("High error rate detected")
+                health_status["status"] = "degraded"
+
+        # Check circuit breaker state
+        if self.circuit_breaker and self.circuit_breaker.state == "open":
+            issues.append("Circuit breaker is open")
+            health_status["status"] = "degraded"
+
+        # Check memory usage for caches
+        if self.cache and self.cache.enabled:
+            cache_stats = self.cache.stats()
+            # Simple heuristic - if any cache is over 90% full
+            for cache_name, stats in cache_stats.items():
+                if isinstance(stats, dict) and stats.get("utilization", 0) > 0.9:
+                    issues.append(f"Cache {cache_name} is over 90% full")
+                    health_status["status"] = "degraded"
+
+        health_status["issues"] = issues
+
+        if issues and health_status["status"] == "healthy":
+            health_status["status"] = "degraded"
+
+        return health_status
+
    def clear_cache(self, cache_name: Optional[str] = None) -> None:
        """Clear cache(s)."""
        if cache_name:
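
Reading the health_check() added above in operation might look like this (a sketch; the keys shown are the ones the method itself constructs):

    health = server.health_check()
    if health["status"] != "healthy":
        for issue in health["issues"]:   # e.g. "Circuit breaker is open"
            logger.warning("health issue: %s", issue)
    print(health["components"]["cache"], health["resources"]["tools_registered"])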
@@ -601,32 +1418,347 @@
            self.cache.clear_all()
            logger.info("Cleared all caches")
 
+    def reset_circuit_breaker(self) -> None:
+        """Reset circuit breaker to closed state."""
+        if self.circuit_breaker:
+            self.circuit_breaker.state = "closed"
+            self.circuit_breaker.failure_count = 0
+            self.circuit_breaker.success_count = 0
+            logger.info("Circuit breaker reset to closed state")
+
+    def terminate_session(self, session_id: str) -> bool:
+        """Terminate an active session."""
+        if session_id in self._active_sessions:
+            del self._active_sessions[session_id]
+            logger.info(f"Terminated session: {session_id}")
+            return True
+        return False
+
+    def get_tool_by_name(self, tool_name: str) -> Optional[Dict[str, Any]]:
+        """Get tool information by name."""
+        return self._tool_registry.get(tool_name)
+
+    def disable_tool(self, tool_name: str) -> bool:
+        """Temporarily disable a tool."""
+        if tool_name in self._tool_registry:
+            self._tool_registry[tool_name]["disabled"] = True
+            logger.info(f"Disabled tool: {tool_name}")
+            return True
+        return False
+
+    def enable_tool(self, tool_name: str) -> bool:
+        """Re-enable a disabled tool."""
+        if tool_name in self._tool_registry:
+            self._tool_registry[tool_name]["disabled"] = False
+            logger.info(f"Enabled tool: {tool_name}")
+            return True
+        return False
+
    def run(self):
-        """Run the MCP server."""
+        """Run the enhanced MCP server with all features."""
        if self._mcp is None:
            self._init_mcp()
 
+        # Record server start time
+        self.config.update({"server.start_time": time.time()})
+
+        # Log enhanced server startup
        logger.info(f"Starting enhanced MCP server: {self.name}")
-        logger.info(f"Cache enabled: {self.cache.enabled}")
-        logger.info(f"Metrics enabled: {self.metrics.enabled}")
+        logger.info("Features enabled:")
+        logger.info(f" - Cache: {self.cache.enabled if self.cache else False}")
+        logger.info(f" - Metrics: {self.metrics.enabled if self.metrics else False}")
+        logger.info(f" - Authentication: {self.auth_manager is not None}")
+        logger.info(f" - HTTP Transport: {self.enable_http_transport}")
+        logger.info(f" - SSE Transport: {self.enable_sse_transport}")
+        logger.info(f" - Streaming: {self.enable_streaming}")
+        logger.info(f" - Circuit Breaker: {self.circuit_breaker is not None}")
+        logger.info(f" - Error Aggregation: {self.error_aggregator is not None}")
+        logger.info(f" - Service Discovery: {self.enable_discovery}")
+
+        logger.info("Server configuration:")
+        logger.info(f" - Tools registered: {len(self._tool_registry)}")
+        logger.info(f" - Resources registered: {len(self._resource_registry)}")
+        logger.info(f" - Prompts registered: {len(self._prompt_registry)}")
+        logger.info(f" - Transport timeout: {self.transport_timeout}s")
+        logger.info(f" - Max request size: {self.max_request_size} bytes")
 
        self._running = True
 
        try:
+            # Perform health check before starting
+            health = self.health_check()
+            if health["status"] != "healthy":
+                logger.warning(f"Server health check shows issues: {health['issues']}")
+
+            # Run the FastMCP server
+            logger.info("Starting FastMCP server...")
            self._mcp.run()
+
        except KeyboardInterrupt:
            logger.info("Server stopped by user")
        except Exception as e:
            logger.error(f"Server error: {e}")
+
+            # Record error if aggregator is enabled
+            if self.error_aggregator:
+                error = MCPError(
+                    f"Server startup/runtime error: {str(e)}",
+                    error_code=MCPErrorCode.SERVER_UNAVAILABLE,
+                    cause=e,
+                )
+                self.error_aggregator.record_error(error)
+
            raise
        finally:
+            logger.info("Shutting down enhanced MCP server...")
+
+            # Clean up active sessions
+            if self._active_sessions:
+                logger.info(f"Terminating {len(self._active_sessions)} active sessions")
+                self._active_sessions.clear()
+
+            # Log final stats
+            if self.metrics and self.metrics.enabled:
+                final_stats = self.get_server_stats()
+                logger.info(
+                    f"Final server statistics: {final_stats.get('metrics', {})}"
+                )
+
            self._running = False
+            logger.info(f"Enhanced MCP server '{self.name}' stopped")
+
+    async def run_stdio(self):
+        """Run the server using stdio transport for testing."""
+        if self._mcp is None:
+            self._init_mcp()
+
+        # For testing, we'll implement a simple stdio server
+        import json
+        import sys
+
+        logger.info(f"Starting MCP server '{self.name}' in stdio mode")
+        self._running = True
+
+        try:
+            while self._running:
+                # Read JSON-RPC request from stdin
+                line = sys.stdin.readline()
+                if not line:
+                    break
+
+                try:
+                    request = json.loads(line.strip())
+
+                    # Handle different request types
+                    if request.get("method") == "tools/list":
+                        # Return list of tools
+                        tools = []
+                        for name, info in self._tool_registry.items():
+                            if not info.get("disabled", False):
+                                tools.append(
+                                    {
+                                        "name": name,
+                                        "description": info.get("description", ""),
+                                        "inputSchema": info.get("input_schema", {}),
+                                    }
+                                )
+
+                        response = {"id": request.get("id"), "result": {"tools": tools}}
+
+                    elif request.get("method") == "tools/call":
+                        # Call a tool
+                        params = request.get("params", {})
+                        tool_name = params.get("name")
+                        arguments = params.get("arguments", {})
+
+                        if tool_name in self._tool_registry:
+                            handler = self._tool_registry[tool_name]["handler"]
+                            try:
+                                # Execute tool
+                                if asyncio.iscoroutinefunction(handler):
+                                    result = await handler(**arguments)
+                                else:
+                                    result = handler(**arguments)
+
+                                response = {
+                                    "id": request.get("id"),
+                                    "result": {
+                                        "content": [
+                                            {"type": "text", "text": str(result)}
+                                        ]
+                                    },
+                                }
+                            except Exception as e:
+                                response = {
+                                    "id": request.get("id"),
+                                    "error": {"code": -32603, "message": str(e)},
+                                }
+                        else:
+                            response = {
+                                "id": request.get("id"),
+                                "error": {
+                                    "code": -32601,
+                                    "message": f"Tool not found: {tool_name}",
+                                },
+                            }
+
+                    else:
+                        # Unknown method
+                        response = {
+                            "id": request.get("id"),
+                            "error": {
+                                "code": -32601,
+                                "message": f"Method not found: {request.get('method')}",
+                            },
+                        }
+
+                    # Write response to stdout
+                    sys.stdout.write(json.dumps(response) + "\n")
+                    sys.stdout.flush()
+
+                except json.JSONDecodeError:
+                    # Invalid JSON
+                    error_response = {
+                        "id": None,
+                        "error": {"code": -32700, "message": "Parse error"},
+                    }
+                    sys.stdout.write(json.dumps(error_response) + "\n")
+                    sys.stdout.flush()
+
+        except KeyboardInterrupt:
+            logger.info("Server stopped by user")
+        except Exception as e:
+            logger.error(f"Server error: {e}")
+            raise
+        finally:
+            self._running = False
+
+
+class SimpleMCPServer(MCPServerBase):
+    """Simple MCP Server for prototyping and development.
+
+    This is a lightweight version of MCPServer without authentication,
+    metrics, caching, or other production features. Perfect for:
+    - Quick prototyping
+    - Development and testing
+    - Simple use cases without advanced features
+
+    Example:
+        >>> server = SimpleMCPServer("my-prototype")
+        >>> @server.tool()
+        ... def hello(name: str) -> str:
+        ...     return f"Hello, {name}!"
+        >>> server.run()
+    """
+
+    def __init__(self, name: str, description: str = None):
+        """Initialize simple MCP server.
+
+        Args:
+            name: Server name
+            description: Server description
+        """
+        super().__init__(name, description)
+
+        # Disable all advanced features for simplicity
+        self.enable_cache = False
+        self.enable_metrics = False
+        self.enable_http_transport = False
+        self.rate_limit_config = None
+        self.circuit_breaker_config = None
+        self.auth_provider = None
+
+        # Simple in-memory storage
+        self._simple_tools = {}
+        self._simple_resources = {}
+        self._simple_prompts = {}
+
+        logger.info(f"SimpleMCPServer '{name}' initialized for prototyping")
+
+    def setup(self):
+        """Setup method - no additional setup needed for SimpleMCPServer."""
+        pass
+
+    def tool(self, description: str = None):
+        """Register a simple tool (no auth, caching, or metrics).
+
+        Args:
+            description: Tool description
+
+        Returns:
+            Decorator function
+        """
+
+        def decorator(func):
+            # Initialize MCP if needed
+            if self._mcp is None:
+                self._init_mcp()
+
+            tool_name = func.__name__
+            self._simple_tools[tool_name] = {
+                "function": func,
+                "description": description or f"Tool: {tool_name}",
+                "created_at": time.time(),
+            }
+
+            # Register with FastMCP
+            self._mcp.tool(description or f"Tool: {tool_name}")(func)
+
+            logger.debug(f"SimpleMCPServer: Registered tool '{tool_name}'")
+            return func
+
+        return decorator
+
+    def resource(self, uri: str, description: str = None):
+        """Register a simple resource.
+
+        Args:
+            uri: Resource URI
+            description: Resource description
+
+        Returns:
+            Decorator function
+        """
+
+        def decorator(func):
+            # Initialize MCP if needed
+            if self._mcp is None:
+                self._init_mcp()
+
+            self._simple_resources[uri] = {
+                "function": func,
+                "description": description or f"Resource: {uri}",
+                "created_at": time.time(),
+            }
+
+            # Register with FastMCP
+            self._mcp.resource(uri, description or f"Resource: {uri}")(func)
+
+            logger.debug(f"SimpleMCPServer: Registered resource '{uri}'")
+            return func
+
+        return decorator
+
+    def get_stats(self) -> dict:
+        """Get simple server statistics.
+
+        Returns:
+            Dictionary with basic stats
+        """
+        return {
+            "server_name": self.name,
+            "server_type": "SimpleMCPServer",
+            "tools_count": len(self._simple_tools),
+            "resources_count": len(self._simple_resources),
+            "prompts_count": len(self._simple_prompts),
+            "features": {
+                "authentication": False,
+                "caching": False,
+                "metrics": False,
+                "rate_limiting": False,
+                "circuit_breaker": False,
+            },
+        }
 
 
-# Clean public API design:
-# - MCPServerBase: Abstract base for custom implementations (e.g., AIRegistryServer)
-# - MCPServer: Main concrete server with all production features
-# - SimpleMCPServer: Alias for backward compatibility
-# - EnhancedMCPServer: Alias for backward compatibility
-MCPServer = EnhancedMCPServer
-SimpleMCPServer = EnhancedMCPServer
+# Note: EnhancedMCPServer alias removed - use MCPServer directly
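
The run_stdio() loop added in this release reads newline-delimited JSON-RPC from stdin; a minimal client-side sketch of the two methods it handles (request ids and the tool name are arbitrary, and the reply shapes are taken from the handlers above):

    import json

    list_request = {"id": 1, "method": "tools/list"}
    call_request = {
        "id": 2,
        "method": "tools/call",
        "params": {"name": "search", "arguments": {"query": "kailash"}},
    }
    # Each request is written to the server's stdin as a single line:
    line = json.dumps(call_request) + "\n"
    # Expected success shape: {"id": 2, "result": {"content": [{"type": "text", "text": "..."}]}}
    # Unknown methods or tools return {"id": ..., "error": {"code": -32601, "message": ...}},
    # and malformed JSON returns {"id": None, "error": {"code": -32700, "message": "Parse error"}}.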