mcp-mesh 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- _mcp_mesh/__init__.py +14 -3
- _mcp_mesh/engine/async_mcp_client.py +6 -19
- _mcp_mesh/engine/dependency_injector.py +161 -74
- _mcp_mesh/engine/full_mcp_proxy.py +25 -20
- _mcp_mesh/engine/mcp_client_proxy.py +5 -19
- _mcp_mesh/generated/.openapi-generator/FILES +2 -0
- _mcp_mesh/generated/mcp_mesh_registry_client/__init__.py +2 -0
- _mcp_mesh/generated/mcp_mesh_registry_client/api/__init__.py +1 -0
- _mcp_mesh/generated/mcp_mesh_registry_client/api/tracing_api.py +305 -0
- _mcp_mesh/generated/mcp_mesh_registry_client/models/__init__.py +1 -0
- _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_info.py +10 -1
- _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_agent_registration.py +4 -4
- _mcp_mesh/generated/mcp_mesh_registry_client/models/trace_event.py +108 -0
- _mcp_mesh/pipeline/__init__.py +2 -2
- _mcp_mesh/pipeline/api_heartbeat/__init__.py +16 -0
- _mcp_mesh/pipeline/api_heartbeat/api_dependency_resolution.py +515 -0
- _mcp_mesh/pipeline/api_heartbeat/api_fast_heartbeat_check.py +117 -0
- _mcp_mesh/pipeline/api_heartbeat/api_health_check.py +140 -0
- _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_orchestrator.py +247 -0
- _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_pipeline.py +309 -0
- _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_send.py +332 -0
- _mcp_mesh/pipeline/api_heartbeat/api_lifespan_integration.py +147 -0
- _mcp_mesh/pipeline/api_heartbeat/api_registry_connection.py +97 -0
- _mcp_mesh/pipeline/api_startup/__init__.py +20 -0
- _mcp_mesh/pipeline/api_startup/api_pipeline.py +61 -0
- _mcp_mesh/pipeline/api_startup/api_server_setup.py +292 -0
- _mcp_mesh/pipeline/api_startup/fastapi_discovery.py +302 -0
- _mcp_mesh/pipeline/api_startup/route_collection.py +56 -0
- _mcp_mesh/pipeline/api_startup/route_integration.py +318 -0
- _mcp_mesh/pipeline/{startup → mcp_startup}/fastmcpserver_discovery.py +4 -4
- _mcp_mesh/pipeline/{startup → mcp_startup}/heartbeat_loop.py +1 -1
- _mcp_mesh/pipeline/{startup → mcp_startup}/startup_orchestrator.py +170 -5
- _mcp_mesh/shared/config_resolver.py +0 -3
- _mcp_mesh/shared/logging_config.py +2 -1
- _mcp_mesh/shared/sse_parser.py +217 -0
- {mcp_mesh-0.4.1.dist-info → mcp_mesh-0.5.0.dist-info}/METADATA +1 -1
- {mcp_mesh-0.4.1.dist-info → mcp_mesh-0.5.0.dist-info}/RECORD +55 -37
- mesh/__init__.py +6 -2
- mesh/decorators.py +143 -1
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/__init__.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/dependency_resolution.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/fast_heartbeat_check.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/heartbeat_orchestrator.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/heartbeat_pipeline.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/heartbeat_send.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/lifespan_integration.py +0 -0
- /_mcp_mesh/pipeline/{heartbeat → mcp_heartbeat}/registry_connection.py +0 -0
- /_mcp_mesh/pipeline/{startup → mcp_startup}/__init__.py +0 -0
- /_mcp_mesh/pipeline/{startup → mcp_startup}/configuration.py +0 -0
- /_mcp_mesh/pipeline/{startup → mcp_startup}/decorator_collection.py +0 -0
- /_mcp_mesh/pipeline/{startup → mcp_startup}/fastapiserver_setup.py +0 -0
- /_mcp_mesh/pipeline/{startup → mcp_startup}/heartbeat_preparation.py +0 -0
- /_mcp_mesh/pipeline/{startup → mcp_startup}/startup_pipeline.py +0 -0
- {mcp_mesh-0.4.1.dist-info → mcp_mesh-0.5.0.dist-info}/WHEEL +0 -0
- {mcp_mesh-0.4.1.dist-info → mcp_mesh-0.5.0.dist-info}/licenses/LICENSE +0 -0
_mcp_mesh/pipeline/{startup → mcp_startup}/fastmcpserver_discovery.py
@@ -41,7 +41,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
         server_info = []
         total_registered_functions = 0
 
-        for server_name, server_instance in discovered_servers.items():
+        for server_name, server_instance in list(discovered_servers.items()):
             info = self._extract_server_info(server_name, server_instance)
             server_info.append(info)
             total_registered_functions += info.get("function_count", 0)
@@ -119,7 +119,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
             "pkgutil",
         }
 
-        for module_name, module in sys.modules.items():
+        for module_name, module in list(sys.modules.items()):
             if (
                 module
                 and not module_name.startswith("_")
@@ -166,7 +166,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
             module_globals = vars(module)
             # Only log if we find FastMCP instances to reduce noise
 
-            for var_name, var_value in module_globals.items():
+            for var_name, var_value in list(module_globals.items()):
                 if self._is_fastmcp_instance(var_value):
                     instance_key = f"{module_name}.{var_name}"
                     found[instance_key] = var_value
@@ -230,7 +230,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
             info["function_count"] += len(tools)
 
             self.logger.debug(f"Server '{server_name}' has {len(tools)} tools:")
-            for tool_name, tool in tools.items():
+            for tool_name, tool in list(tools.items()):
                 function_ptr = getattr(tool, "fn", None)
                 self.logger.debug(f"  - {tool_name}: {function_ptr}")
 
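Note: the four one-line changes above are the same defensive fix. Iterating a live dict view such as `sys.modules.items()` while decorator imports register new modules can raise `RuntimeError: dictionary changed size during iteration`; wrapping the view in `list(...)` snapshots it first. A standalone illustration (plain Python, not mcp-mesh code):

```python
# Mutating a dict while iterating its live view raises at runtime.
registry = {"a": 1, "b": 2}
try:
    for key, value in registry.items():
        registry[key + "_copy"] = value  # grows the dict mid-iteration
except RuntimeError as e:
    print(f"live view failed: {e}")  # dictionary changed size during iteration

# Snapshotting the items first, as the diff now does, is safe.
registry = {"a": 1, "b": 2}
for key, value in list(registry.items()):
    registry[key + "_copy"] = value  # iterates over a frozen copy
print(sorted(registry))  # ['a', 'a_copy', 'b', 'b_copy']
```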
_mcp_mesh/pipeline/{startup → mcp_startup}/heartbeat_loop.py

@@ -49,7 +49,7 @@ class HeartbeatLoopStep(PipelineStep):
         standalone_mode = self._get_standalone_mode()
 
         # Import heartbeat task function
-        from ..heartbeat import heartbeat_lifespan_task
+        from ..mcp_heartbeat import heartbeat_lifespan_task
 
         # Create heartbeat config - registry connection will be attempted in heartbeat pipeline
         heartbeat_config = {
_mcp_mesh/pipeline/{startup → mcp_startup}/startup_orchestrator.py

@@ -72,6 +72,36 @@ class DebounceCoordinator:
             f"⏰ Scheduled processing in {self.delay_seconds} seconds"
         )
 
+    def _determine_pipeline_type(self) -> str:
+        """
+        Determine which pipeline to execute based on registered decorators.
+
+        Returns:
+            "mcp": Only MCP agents/tools found
+            "api": Only API routes found
+            "mixed": Both MCP and API decorators found (throws exception)
+            "none": No decorators found
+        """
+        from ...engine.decorator_registry import DecoratorRegistry
+
+        agents = DecoratorRegistry.get_mesh_agents()
+        tools = DecoratorRegistry.get_mesh_tools()
+        routes = DecoratorRegistry.get_all_by_type("mesh_route")
+
+        has_mcp = len(agents) > 0 or len(tools) > 0
+        has_api = len(routes) > 0
+
+        self.logger.debug(f"🔍 Pipeline type detection: MCP={has_mcp} ({len(agents)} agents, {len(tools)} tools), API={has_api} ({len(routes)} routes)")
+
+        if has_api and has_mcp:
+            return "mixed"
+        elif has_api:
+            return "api"
+        elif has_mcp:
+            return "mcp"
+        else:
+            return "none"
+
     def _execute_processing(self) -> None:
         """Execute the processing (called by timer)."""
         try:
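The branching above is small enough to mirror in a self-contained function; this sketch restates the decision table for reference and is not mcp-mesh code:

```python
# Pure-Python mirror of DebounceCoordinator._determine_pipeline_type's branching.
def determine_pipeline_type(num_agents: int, num_tools: int, num_routes: int) -> str:
    has_mcp = num_agents > 0 or num_tools > 0
    has_api = num_routes > 0
    if has_api and has_mcp:
        return "mixed"  # rejected downstream with a RuntimeError
    if has_api:
        return "api"    # @mesh.route only: DI into the user's FastAPI app
    if has_mcp:
        return "mcp"    # @mesh.agent/@mesh.tool only: mesh-managed server
    return "none"       # nothing to process

assert determine_pipeline_type(1, 2, 0) == "mcp"
assert determine_pipeline_type(0, 0, 3) == "api"
assert determine_pipeline_type(1, 0, 1) == "mixed"
assert determine_pipeline_type(0, 0, 0) == "none"
```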
@@ -83,6 +113,21 @@ class DebounceCoordinator:
                 f"🚀 Debounce delay ({self.delay_seconds}s) complete, processing all decorators"
             )
 
+            # Determine which pipeline to execute
+            pipeline_type = self._determine_pipeline_type()
+
+            if pipeline_type == "mixed":
+                error_msg = (
+                    "❌ Mixed mode not supported: Cannot use @mesh.route decorators "
+                    "together with @mesh.tool/@mesh.agent decorators in the same process. "
+                    "Please use either MCP agent decorators OR API route decorators, not both."
+                )
+                self.logger.error(error_msg)
+                raise RuntimeError(error_msg)
+            elif pipeline_type == "none":
+                self.logger.warning("⚠️ No decorators found - nothing to process")
+                return
+
             # Execute the pipeline using asyncio.run
             import asyncio
 
@@ -90,18 +135,36 @@ class DebounceCoordinator:
             auto_run_enabled = self._check_auto_run_enabled()
 
             self.logger.debug(f"🔍 Auto-run enabled: {auto_run_enabled}")
+            self.logger.info(f"🎯 Pipeline type: {pipeline_type}")
 
             if auto_run_enabled:
                 self.logger.info("🔄 Auto-run enabled - using FastAPI natural blocking")
-
-
+
+                # Execute appropriate pipeline based on type
+                if pipeline_type == "mcp":
+                    # Phase 1: Run async MCP pipeline setup
+                    result = asyncio.run(self._orchestrator.process_once())
+                elif pipeline_type == "api":
+                    # Phase 1: Run async API pipeline setup
+                    result = asyncio.run(self._orchestrator.process_api_once())
+                else:
+                    raise RuntimeError(f"Unsupported pipeline type: {pipeline_type}")
 
                 # Phase 2: Extract FastAPI app and start synchronous server
                 pipeline_context = result.get("context", {}).get("pipeline_context", {})
                 fastapi_app = pipeline_context.get("fastapi_app")
                 binding_config = pipeline_context.get("fastapi_binding_config", {})
-
-
+                heartbeat_config = pipeline_context.get("heartbeat_config", {})
+
+                if pipeline_type == "api":
+                    # For API services, ONLY do dependency injection - user controls their FastAPI server
+                    # Dependency injection is already complete from pipeline execution
+                    # Optionally start heartbeat in background (non-blocking)
+                    self._setup_api_heartbeat_background(heartbeat_config, pipeline_context)
+                    self.logger.info("✅ API dependency injection complete - user's FastAPI server can now start")
+                    return  # Don't block - let user's uvicorn run
+                elif fastapi_app and binding_config:
+                    # For MCP agents with FastAPI server
                     self._start_blocking_fastapi_server(fastapi_app, binding_config)
                 else:
                     self.logger.warning(
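The practical consequence of the `api` branch: the mesh runtime no longer blocks on its own uvicorn server but injects dependencies, spawns the heartbeat thread, and returns, leaving the process to the user's own `uvicorn.run(...)`. A hedged sketch of an API-mode service under 0.5.0 (the `@mesh.route` call form is an illustrative assumption; only the decorator name and the early-return behavior come from this diff):

```python
# Sketch only: the real @mesh.route signature lives in mesh/decorators.py.
import uvicorn
from fastapi import FastAPI

import mesh  # mcp-mesh decorator package

app = FastAPI()

@app.get("/status")
@mesh.route()  # hypothetical no-arg form, for illustration
def status():
    return {"ok": True}

if __name__ == "__main__":
    # In "api" mode the mesh runtime returns after dependency injection,
    # so this user-owned uvicorn call is what actually serves the app.
    uvicorn.run(app, host="0.0.0.0", port=8080)
```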
@@ -110,11 +173,20 @@ class DebounceCoordinator:
             else:
                 # Single execution mode (for testing/debugging)
                 self.logger.info("🏁 Auto-run disabled - single execution mode")
-
+
+                if pipeline_type == "mcp":
+                    result = asyncio.run(self._orchestrator.process_once())
+                elif pipeline_type == "api":
+                    result = asyncio.run(self._orchestrator.process_api_once())
+                else:
+                    raise RuntimeError(f"Unsupported pipeline type: {pipeline_type}")
+
             self.logger.info("✅ Pipeline execution completed, exiting")
 
         except Exception as e:
             self.logger.error(f"❌ Error in debounced processing: {e}")
+            # Re-raise to ensure the system exits on mixed mode or other critical errors
+            raise
 
     def _start_blocking_fastapi_server(
         self, app: Any, binding_config: dict[str, Any]
@@ -148,6 +220,58 @@ class DebounceCoordinator:
             self.logger.error(f"❌ FastAPI server error: {e}")
             raise
 
+    def _setup_api_heartbeat_background(
+        self, heartbeat_config: dict[str, Any], pipeline_context: dict[str, Any]
+    ) -> None:
+        """Setup API heartbeat to run in background - non-blocking."""
+        try:
+            # Populate heartbeat context with current pipeline context
+            heartbeat_config["context"] = pipeline_context
+            service_id = heartbeat_config.get("service_id", "unknown")
+            standalone_mode = heartbeat_config.get("standalone_mode", False)
+
+            if standalone_mode:
+                self.logger.info(
+                    f"📝 API service '{service_id}' configured in standalone mode - no heartbeat"
+                )
+                return
+
+            self.logger.info(
+                f"🔗 Setting up background API heartbeat for service '{service_id}'"
+            )
+
+            # Import heartbeat functionality
+            from ..api_heartbeat.api_lifespan_integration import api_heartbeat_lifespan_task
+            import threading
+            import asyncio
+
+            def run_heartbeat():
+                """Run heartbeat in separate thread with its own event loop."""
+                self.logger.debug(f"Starting background heartbeat thread for {service_id}")
+                try:
+                    # Create new event loop for this thread
+                    loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(loop)
+
+                    # Run heartbeat task
+                    loop.run_until_complete(api_heartbeat_lifespan_task(heartbeat_config))
+                except Exception as e:
+                    self.logger.error(f"❌ Background heartbeat error: {e}")
+                finally:
+                    loop.close()
+
+            # Start heartbeat in daemon thread (won't prevent process exit)
+            heartbeat_thread = threading.Thread(target=run_heartbeat, daemon=True)
+            heartbeat_thread.start()
+
+            self.logger.info(
+                f"💓 Background API heartbeat thread started for service '{service_id}'"
+            )
+
+        except Exception as e:
+            self.logger.warning(f"⚠️ Could not setup API heartbeat: {e}")
+            # Don't fail - heartbeat is optional for API services
+
     def _perform_graceful_shutdown(self) -> None:
         """Perform graceful shutdown by unregistering from registry."""
         try:
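The thread-plus-private-loop shape of `run_heartbeat` is the standard way to run a long-lived coroutine alongside an event loop the library does not own (here, the user's uvicorn loop): calling `asyncio.run()` on the main thread would conflict, so the task gets a daemon thread with its own loop. A generic, self-contained version of the pattern (names are illustrative, not mcp-mesh API):

```python
import asyncio
import threading

async def periodic_task(interval: float) -> None:
    """Stand-in for a heartbeat coroutine."""
    while True:
        print("beat")
        await asyncio.sleep(interval)

def run_in_background(coro_factory, *args) -> threading.Thread:
    """Run an async task on a daemon thread with its own event loop."""
    def runner() -> None:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(coro_factory(*args))
        finally:
            loop.close()
    thread = threading.Thread(target=runner, daemon=True)  # won't block exit
    thread.start()
    return thread

# run_in_background(periodic_task, 5.0)  # e.g. beat every five seconds
```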
@@ -272,6 +396,43 @@ class MeshOrchestrator:
             "timestamp": result.timestamp.isoformat(),
         }
 
+    async def process_api_once(self) -> dict:
+        """
+        Execute the API pipeline once for @mesh.route decorators.
+
+        This handles FastAPI route integration and dependency injection setup.
+        """
+        self.logger.info(f"🚀 Starting API pipeline execution: {self.name}")
+
+        try:
+            # Import API pipeline here to avoid circular imports
+            from ..api_startup import APIPipeline
+
+            # Create and execute API pipeline
+            api_pipeline = APIPipeline(name=f"{self.name}-api")
+            result = await api_pipeline.execute()
+
+            # Convert result to dict for return type (same format as MCP pipeline)
+            return {
+                "status": result.status.value,
+                "message": result.message,
+                "errors": result.errors,
+                "context": result.context,
+                "timestamp": result.timestamp.isoformat(),
+            }
+
+        except Exception as e:
+            error_msg = f"API pipeline execution failed: {e}"
+            self.logger.error(f"❌ {error_msg}")
+
+            return {
+                "status": "failed",
+                "message": error_msg,
+                "errors": [str(e)],
+                "context": {},
+                "timestamp": "unknown",
+            }
+
     async def start_service(self, auto_run_config: Optional[dict] = None) -> None:
         """
         Start the service with optional auto-run behavior.
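Because `process_api_once` returns the same dict shape as the MCP pipeline, callers can consume either result uniformly. A minimal sketch, assuming an already-constructed `MeshOrchestrator` instance named `orchestrator`:

```python
import asyncio

# `orchestrator` is assumed to be a constructed MeshOrchestrator instance.
result = asyncio.run(orchestrator.process_api_once())

if result["status"] == "failed":
    # Errors are collected as strings, mirroring the MCP pipeline's format.
    for err in result["errors"]:
        print(f"API pipeline error: {err}")
else:
    print(f"API pipeline ok at {result['timestamp']}: {result['message']}")
```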
@@ -366,6 +527,10 @@ def start_runtime() -> None:
     Actual pipeline execution will be triggered by decorator registration
     with a configurable delay to ensure all decorators are captured.
     """
+    # Configure logging FIRST before any log messages
+    from ...shared.logging_config import configure_logging
+    configure_logging()
+
     logger.info("🔧 Starting MCP Mesh runtime with debouncing")
 
     # Install signal handlers in main thread FIRST (before any threading)
_mcp_mesh/shared/config_resolver.py

@@ -71,12 +71,9 @@ def get_config_value(
         raw_value = default
         source = "default"
 
-    logger.debug(f"Config {env_var}: raw_value={raw_value} (from {source})")
-
     # Step 2: Validate and convert the value
     try:
         validated_value = _validate_value(raw_value, rule, env_var)
-        logger.debug(f"Config {env_var}: validated_value={validated_value}")
         return validated_value
 
     except ConfigResolutionError as e:
_mcp_mesh/shared/logging_config.py

@@ -69,8 +69,9 @@ def configure_logging():
     root_logger.addHandler(handler)
     root_logger.setLevel(log_level)
 
-    # Set level for all mcp_mesh loggers
+    # Set level for all mcp_mesh loggers (both mcp_mesh and _mcp_mesh namespaces)
     logging.getLogger("mcp_mesh").setLevel(log_level)
+    logging.getLogger("_mcp_mesh").setLevel(log_level)
 
     # Return the configured level for reference
     return log_level
_mcp_mesh/shared/sse_parser.py (new file)

@@ -0,0 +1,217 @@
+"""Server-Sent Events (SSE) parsing utilities for MCP responses."""
+
+import json
+import logging
+from typing import Any, Dict, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class SSEParser:
+    """Utility class for parsing Server-Sent Events responses from FastMCP servers.
+
+    Handles the common issue where large JSON responses get split across multiple
+    SSE 'data:' lines, which would cause JSON parsing failures if processed line-by-line.
+    """
+
+    @staticmethod
+    def parse_sse_response(
+        response_text: str, context: str = "unknown"
+    ) -> dict[str, Any]:
+        """
+        Parse SSE response text and extract JSON data.
+
+        Handles multi-line JSON responses by accumulating all 'data:' lines
+        before attempting to parse JSON.
+
+        Args:
+            response_text: Raw SSE response text
+            context: Context string for error logging
+
+        Returns:
+            Parsed JSON data as dictionary
+
+        Raises:
+            RuntimeError: If SSE response cannot be parsed
+        """
+        logger.debug(f"🔧 SSEParser.parse_sse_response called from {context}")
+        logger.debug(
+            f"🔧 Response text length: {len(response_text)}, starts with 'event:': {response_text.startswith('event:')}"
+        )
+        logger.debug(f"🔧 Response preview: {repr(response_text[:100])}...")
+
+        # Check if this is SSE format (can be malformed and not start with "event:")
+        is_sse_format = (
+            response_text.startswith("event:")
+            or "event: message" in response_text
+            or "data: " in response_text
+        )
+
+        if not is_sse_format:
+            # Not an SSE response, try parsing as plain JSON
+            logger.debug(f"🔧 {context}: Parsing as plain JSON (not SSE format)")
+            logger.debug(
+                f"🔧 {context}: Response preview: {repr(response_text[:200])}..."
+            )
+            try:
+                result = json.loads(response_text)
+                logger.debug(f"🔧 {context}: Plain JSON parsed successfully")
+                return result
+            except json.JSONDecodeError as e:
+                logger.error(f"🔧 {context}: Plain JSON parse failed: {e}")
+                logger.error(
+                    f"🔧 {context}: Invalid response content (first 500 chars): {repr(response_text[:500])}"
+                )
+                raise RuntimeError(f"Invalid JSON response in {context}: {e}")
+
+        # Parse SSE format: find first valid JSON in data lines
+        logger.debug(f"🔧 {context}: Parsing SSE format - looking for first valid JSON")
+        data_line_count = 0
+        first_valid_json = None
+
+        for line in response_text.split("\n"):
+            if line.startswith("data:"):
+                data_content = line[5:].strip()  # Remove 'data:' prefix and whitespace
+                if data_content:
+                    data_line_count += 1
+                    try:
+                        # Try to parse this line as JSON
+                        parsed_json = json.loads(data_content)
+                        if first_valid_json is None:
+                            first_valid_json = parsed_json
+                            logger.debug(f"🔧 {context}: Found first valid JSON in data line {data_line_count}")
+                    except json.JSONDecodeError:
+                        # Skip invalid JSON lines - this is expected behavior
+                        logger.debug(f"🔧 {context}: Skipping invalid JSON in data line {data_line_count}: {data_content[:50]}...")
+                        continue
+
+        logger.debug(
+            f"🔧 {context}: Processed {data_line_count} data lines"
+        )
+
+        # Return first valid JSON found
+        if first_valid_json is None:
+            logger.error(f"🔧 {context}: No valid JSON found in SSE response")
+            raise RuntimeError(f"Could not parse SSE response from FastMCP")
+
+        logger.debug(
+            f"🔧 {context}: SSE parsing successful! Result type: {type(first_valid_json)}"
+        )
+        return first_valid_json
+
+    @staticmethod
+    def parse_streaming_sse_chunk(chunk_data: str) -> Optional[dict[str, Any]]:
+        """
+        Parse a single streaming SSE chunk.
+
+        Used for processing individual chunks in streaming responses.
+
+        Args:
+            chunk_data: Single data line content (without 'data:' prefix)
+
+        Returns:
+            Parsed JSON if valid and complete, None if should be skipped
+        """
+        if not chunk_data.strip():
+            return None
+
+        # Quick validation for complete JSON structures
+        chunk_data = chunk_data.strip()
+
+        # Must be complete JSON structures
+        if (
+            (chunk_data.startswith("{") and not chunk_data.endswith("}"))
+            or (chunk_data.startswith("[") and not chunk_data.endswith("]"))
+            or (chunk_data.startswith('"') and not chunk_data.endswith('"'))
+        ):
+            # Incomplete JSON structure - should be accumulated elsewhere
+            return None
+
+        try:
+            return json.loads(chunk_data)
+        except json.JSONDecodeError:
+            # Invalid JSON - skip this chunk
+            return None
+
+
+class SSEStreamProcessor:
+    """Processor for streaming SSE responses with proper buffering."""
+
+    def __init__(self, context: str = "streaming"):
+        self.context = context
+        self.buffer = ""
+        self.logger = logger.getChild(f"sse_stream.{context}")
+
+    def process_chunk(self, chunk_bytes: bytes) -> list[dict[str, Any]]:
+        """
+        Process a chunk of bytes and return any complete JSON objects found.
+
+        Args:
+            chunk_bytes: Raw bytes from streaming response
+
+        Returns:
+            List of complete JSON objects found in this chunk
+        """
+        self.logger.debug(
+            f"🌊 SSEStreamProcessor.process_chunk called for {self.context}, chunk size: {len(chunk_bytes)}"
+        )
+
+        try:
+            chunk_text = chunk_bytes.decode("utf-8")
+            self.buffer += chunk_text
+            self.logger.debug(
+                f"🌊 {self.context}: Buffer size after chunk: {len(self.buffer)}"
+            )
+        except UnicodeDecodeError:
+            self.logger.warning(
+                f"🌊 {self.context}: Skipping chunk with unicode decode error"
+            )
+            return []
+
+        results = []
+        events_processed = 0
+
+        # Process complete SSE events (end with \n\n)
+        while True:
+            event_end = self.buffer.find("\n\n")
+            if event_end == -1:
+                break  # No complete event yet
+
+            event_block = self.buffer[:event_end]
+            self.buffer = self.buffer[event_end + 2 :]  # Remove processed event
+            events_processed += 1
+
+            # Extract data from SSE event
+            for line in event_block.split("\n"):
+                if line.startswith("data: "):
+                    data_str = line[6:].strip()  # Remove "data: " prefix
+                    if data_str:
+                        parsed = SSEParser.parse_streaming_sse_chunk(data_str)
+                        if parsed:
+                            results.append(parsed)
+
+        self.logger.debug(
+            f"🌊 {self.context}: Processed {events_processed} complete SSE events, yielding {len(results)} JSON objects"
+        )
+        return results
+
+    def finalize(self) -> list[dict[str, Any]]:
+        """
+        Process any remaining data in buffer.
+
+        Returns:
+            List of any final JSON objects found
+        """
+        results = []
+
+        if self.buffer.strip():
+            for line in self.buffer.split("\n"):
+                if line.startswith("data: "):
+                    data_str = line[6:].strip()
+                    if data_str:
+                        parsed = SSEParser.parse_streaming_sse_chunk(data_str)
+                        if parsed:
+                            results.append(parsed)
+
+        self.buffer = ""  # Clear buffer
+        return results
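A short usage sketch of the new module, exercising both the one-shot parser and the streaming processor against a synthetic FastMCP-style SSE payload (note `_mcp_mesh` is a private package, so this import path may not be a supported public API):

```python
from _mcp_mesh.shared.sse_parser import SSEParser, SSEStreamProcessor

sse_payload = (
    "event: message\n"
    'data: {"jsonrpc": "2.0", "id": 1, "result": {"ok": true}}\n'
    "\n"
)

# One-shot parsing: returns the first valid JSON found in the data lines.
result = SSEParser.parse_sse_response(sse_payload, context="example")
print(result["result"])  # {'ok': True}

# Streaming: feed byte chunks of any size; events are buffered until their
# terminating blank line ("\n\n") arrives.
processor = SSEStreamProcessor(context="example")
objects = []
for chunk in (sse_payload[:25].encode(), sse_payload[25:].encode()):
    objects.extend(processor.process_chunk(chunk))
objects.extend(processor.finalize())
print(objects)  # [{'jsonrpc': '2.0', 'id': 1, 'result': {'ok': True}}]
```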
{mcp_mesh-0.4.1.dist-info → mcp_mesh-0.5.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-mesh
-Version: 0.4.1
+Version: 0.5.0
 Summary: Kubernetes-native platform for distributed MCP applications
 Project-URL: Homepage, https://github.com/dhyansraj/mcp-mesh
 Project-URL: Documentation, https://github.com/dhyansraj/mcp-mesh/tree/main/docs
|