mcp-mesh 0.7.12__py3-none-any.whl → 0.7.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. _mcp_mesh/__init__.py +1 -1
  2. _mcp_mesh/engine/__init__.py +1 -22
  3. _mcp_mesh/engine/async_mcp_client.py +88 -25
  4. _mcp_mesh/engine/decorator_registry.py +10 -9
  5. _mcp_mesh/engine/dependency_injector.py +64 -53
  6. _mcp_mesh/engine/mesh_llm_agent.py +119 -5
  7. _mcp_mesh/engine/mesh_llm_agent_injector.py +30 -0
  8. _mcp_mesh/engine/session_aware_client.py +3 -3
  9. _mcp_mesh/engine/unified_mcp_proxy.py +82 -90
  10. _mcp_mesh/pipeline/api_heartbeat/api_dependency_resolution.py +0 -89
  11. _mcp_mesh/pipeline/api_heartbeat/api_fast_heartbeat_check.py +3 -3
  12. _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_pipeline.py +30 -28
  13. _mcp_mesh/pipeline/mcp_heartbeat/dependency_resolution.py +16 -18
  14. _mcp_mesh/pipeline/mcp_heartbeat/fast_heartbeat_check.py +5 -5
  15. _mcp_mesh/pipeline/mcp_heartbeat/heartbeat_orchestrator.py +3 -3
  16. _mcp_mesh/pipeline/mcp_heartbeat/heartbeat_pipeline.py +6 -6
  17. _mcp_mesh/pipeline/mcp_heartbeat/heartbeat_send.py +1 -1
  18. _mcp_mesh/pipeline/mcp_heartbeat/llm_tools_resolution.py +15 -11
  19. _mcp_mesh/pipeline/mcp_heartbeat/registry_connection.py +3 -3
  20. _mcp_mesh/pipeline/mcp_startup/fastapiserver_setup.py +37 -268
  21. _mcp_mesh/pipeline/mcp_startup/lifespan_factory.py +142 -0
  22. _mcp_mesh/pipeline/mcp_startup/startup_orchestrator.py +57 -93
  23. _mcp_mesh/pipeline/shared/registry_connection.py +1 -1
  24. _mcp_mesh/shared/health_check_manager.py +313 -0
  25. _mcp_mesh/shared/logging_config.py +190 -7
  26. _mcp_mesh/shared/registry_client_wrapper.py +8 -8
  27. _mcp_mesh/shared/sse_parser.py +19 -17
  28. _mcp_mesh/tracing/execution_tracer.py +26 -1
  29. _mcp_mesh/tracing/fastapi_tracing_middleware.py +3 -4
  30. _mcp_mesh/tracing/trace_context_helper.py +25 -6
  31. {mcp_mesh-0.7.12.dist-info → mcp_mesh-0.7.14.dist-info}/METADATA +1 -1
  32. {mcp_mesh-0.7.12.dist-info → mcp_mesh-0.7.14.dist-info}/RECORD +38 -39
  33. mesh/__init__.py +3 -1
  34. mesh/decorators.py +81 -43
  35. mesh/helpers.py +72 -4
  36. mesh/types.py +48 -4
  37. _mcp_mesh/engine/full_mcp_proxy.py +0 -641
  38. _mcp_mesh/engine/mcp_client_proxy.py +0 -457
  39. _mcp_mesh/shared/health_check_cache.py +0 -246
  40. {mcp_mesh-0.7.12.dist-info → mcp_mesh-0.7.14.dist-info}/WHEEL +0 -0
  41. {mcp_mesh-0.7.12.dist-info → mcp_mesh-0.7.14.dist-info}/licenses/LICENSE +0 -0
@@ -1,641 +0,0 @@
1
- """Full MCP Protocol Proxy with streaming support and enhanced auto-configuration."""
2
-
3
- import asyncio
4
- import json
5
- import logging
6
- import uuid
7
- from collections.abc import AsyncIterator
8
- from typing import Any, Optional
9
-
10
- from ..shared.sse_parser import SSEStreamProcessor
11
- from .async_mcp_client import AsyncMCPClient
12
- from .mcp_client_proxy import MCPClientProxy
13
-
14
- logger = logging.getLogger(__name__)
15
-
16
-
17
class FullMCPProxy(MCPClientProxy):
    """Full MCP Protocol Proxy with streaming support and complete MCP method access.

    This proxy extends MCPClientProxy to provide:
    1. Full MCP protocol support (tools, resources, prompts)
    2. Streaming tool calls using FastMCP's text/event-stream
    3. Direct method access for developers (not just __call__)
    4. Multihop streaming capabilities (A→B→C chains)

    Designed to replace the prototype McpMeshAgent with proper dependency injection.
    """

    def __init__(
        self, endpoint: str, function_name: str, kwargs_config: Optional[dict] = None
    ):
        """Initialize Full MCP Proxy.

        Args:
            endpoint: Base URL of the remote MCP service
            function_name: Specific tool function to call (for __call__ compatibility)
            kwargs_config: Optional kwargs configuration from @mesh.tool decorator
        """
        super().__init__(endpoint, function_name, kwargs_config)
        self.logger = logger.getChild(f"full_proxy.{function_name}")

        # Log kwargs configuration if provided
        if self.kwargs_config:
            self.logger.debug(
                f"🔧 FullMCPProxy initialized with kwargs: {self.kwargs_config}"
            )

    def _inject_trace_headers(self, headers: dict) -> dict:
        """Inject trace context headers for distributed tracing.

        Mutates *headers* in place and returns it for call-chaining convenience.
        """
        from ..tracing.trace_context_helper import TraceContextHelper

        TraceContextHelper.inject_trace_headers_to_request(
            headers, self.endpoint, self.logger
        )
        return headers

    async def _with_temp_client(self, method_name: str, *args) -> Any:
        """Run a single AsyncMCPClient call with guaranteed client cleanup.

        Consolidates the create/call/close boilerplate previously duplicated
        across every vanilla MCP protocol method below.

        Args:
            method_name: Name of the AsyncMCPClient coroutine method to invoke
            *args: Positional arguments forwarded to that method
        """
        client = AsyncMCPClient(self.endpoint)
        try:
            return await getattr(client, method_name)(*args)
        finally:
            await client.close()

    # Phase 6: Streaming Support - THE BREAKTHROUGH METHOD!
    async def call_tool_streaming(
        self, name: str, arguments: Optional[dict] = None
    ) -> AsyncIterator[dict]:
        """Call a tool with streaming response using FastMCP's text/event-stream.

        This is the breakthrough method that enables multihop streaming (A→B→C chains)
        by leveraging FastMCP's built-in streaming support.

        Args:
            name: Tool name to call
            arguments: Tool arguments

        Yields:
            Streaming response chunks as dictionaries

        Raises:
            RuntimeError: If the HTTP request fails or returns an error status.
        """
        self.logger.debug(f"🌊 Streaming call to tool '{name}' with args: {arguments}")

        try:
            # Prepare JSON-RPC payload
            payload = {
                "jsonrpc": "2.0",
                "id": 1,
                "method": "tools/call",
                "params": {"name": name, "arguments": arguments or {}},
            }

            # Import up front so a missing httpx falls back cleanly instead of
            # failing mid-request.
            try:
                import httpx
            except ImportError:
                # Fallback: if httpx not available, use sync call
                self.logger.warning(
                    "httpx not available for streaming, falling back to sync call"
                )
                result = await self._async_call_tool(name, arguments)
                yield result
                return

            url = f"{self.endpoint}/mcp"

            # Build headers with trace context
            headers = {
                "Content-Type": "application/json",
                "Accept": "text/event-stream",  # THIS IS THE KEY!
            }
            headers = self._inject_trace_headers(headers)

            async with httpx.AsyncClient(timeout=30.0) as client:
                async with client.stream(
                    "POST",
                    url,
                    json=payload,
                    headers=headers,
                ) as response:
                    if response.status_code >= 400:
                        raise RuntimeError(f"HTTP error {response.status_code}")

                    # Use shared SSE stream processor
                    sse_processor = SSEStreamProcessor(f"FullMCPProxy.{name}")

                    async for chunk_bytes in response.aiter_bytes(8192):
                        for chunk in sse_processor.process_chunk(chunk_bytes):
                            yield chunk

                    # Process any remaining buffered data
                    for chunk in sse_processor.finalize():
                        yield chunk

        except Exception as e:
            self.logger.error(f"❌ Streaming call to '{name}' failed: {e}")
            # Chain the original exception for easier debugging
            raise RuntimeError(f"Streaming call to '{name}' failed: {e}") from e

    async def _async_call_tool(
        self, name: str, arguments: Optional[dict] = None
    ) -> dict:
        """Async version of tool call (non-streaming fallback)."""
        return await self._with_temp_client("call_tool", name, arguments or {})

    # Vanilla MCP Protocol Methods (100% compatibility)
    async def list_tools(self) -> list:
        """List available tools from remote agent (vanilla MCP method)."""
        return await self._with_temp_client("list_tools")

    async def list_resources(self) -> list:
        """List available resources from remote agent (vanilla MCP method)."""
        return await self._with_temp_client("list_resources")

    async def read_resource(self, uri: str) -> Any:
        """Read resource contents from remote agent (vanilla MCP method)."""
        return await self._with_temp_client("read_resource", uri)

    async def list_prompts(self) -> list:
        """List available prompts from remote agent (vanilla MCP method)."""
        return await self._with_temp_client("list_prompts")

    async def get_prompt(self, name: str, arguments: Optional[dict] = None) -> Any:
        """Get prompt template from remote agent (vanilla MCP method)."""
        return await self._with_temp_client("get_prompt", name, arguments)

    # Phase 6: Explicit Session Management
    async def create_session(self) -> str:
        """
        Create a new session and return session ID.

        For Phase 6 explicit session management. In Phase 8, this will be
        automated based on @mesh.tool(session_required=True) annotations.

        Returns:
            New session ID string
        """
        # Generate unique session ID
        session_id = f"session:{uuid.uuid4().hex[:16]}"

        # For Phase 6, we just return the ID. The session routing middleware
        # will handle the actual session assignment when calls are made with
        # the session ID in headers.
        self.logger.debug(f"Created session ID: {session_id}")
        return session_id

    async def call_with_session(self, session_id: str, **kwargs) -> Any:
        """
        Call tool with explicit session ID for stateful operations.

        This ensures all calls with the same session_id route to the same
        agent instance for session affinity.

        Args:
            session_id: Session ID to include in request headers
            **kwargs: Tool arguments to pass

        Returns:
            Tool response

        Raises:
            RuntimeError: On missing httpx, HTTP errors, connection errors,
                unparseable responses, or JSON-RPC errors from the remote tool.
        """
        # Import httpx BEFORE the request try-block. Previously the import
        # lived inside the try whose first except clause was
        # `except httpx.RequestError`; with httpx missing, evaluating that
        # clause raised NameError and the ImportError fallback was unreachable.
        try:
            import httpx
        except ImportError:
            # Session calls require httpx for header support
            raise RuntimeError("Session calls require httpx library for header support")

        # URL for MCP protocol endpoint (bound before the try-block so the
        # error handlers below can safely reference it)
        url = f"{self.endpoint.rstrip('/')}/mcp"

        # Build MCP tool call request
        # Add session_id to function arguments if the function expects it
        function_args = kwargs.copy()
        function_args["session_id"] = (
            session_id  # Pass session_id as function parameter
        )

        payload = {
            "jsonrpc": "2.0",
            "id": 1,
            "method": "tools/call",
            "params": {
                "name": self.function_name,
                "arguments": function_args,
            },
        }

        # Add session ID to headers for session routing
        headers = {
            "Content-Type": "application/json",
            "Accept": "application/json, text/event-stream",  # Required by FastMCP
            "X-Session-ID": session_id,  # Key header for session routing
        }
        headers = self._inject_trace_headers(headers)

        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(url, json=payload, headers=headers)

                if response.status_code == 404:
                    raise RuntimeError(f"MCP endpoint not found at {url}")
                elif response.status_code >= 400:
                    raise RuntimeError(
                        f"HTTP error {response.status_code}: {response.reason_phrase}"
                    )

                response_text = response.text

                # Handle Server-Sent Events format from FastMCP
                if response_text.startswith("event:"):
                    # Parse SSE format: extract JSON from "data:" lines
                    json_data = None
                    for line in response_text.split("\n"):
                        if line.startswith("data:"):
                            json_str = line[5:].strip()  # Remove 'data:' prefix
                            try:
                                json_data = json.loads(json_str)
                                break
                            except json.JSONDecodeError:
                                continue

                    if json_data is None:
                        raise RuntimeError("Could not parse SSE response from FastMCP")
                    data = json_data
                else:
                    # Plain JSON response
                    data = response.json()

                # Check for JSON-RPC error
                if "error" in data:
                    error = data["error"]
                    error_msg = error.get("message", "Unknown error")
                    raise RuntimeError(f"Tool call error: {error_msg}")

                # Return the result
                if "result" in data:
                    return data["result"]
                return data

        except httpx.RequestError as e:
            raise RuntimeError(f"Connection error to {url}: {e}")
        except json.JSONDecodeError as e:
            raise RuntimeError(f"Invalid JSON response: {e}")

    async def close_session(self, session_id: str) -> bool:
        """
        Close session and cleanup session state.

        Args:
            session_id: Session ID to close

        Returns:
            True if session was closed successfully
        """
        # For Phase 6, session cleanup is handled by the session routing middleware
        # and Redis TTL. In Phase 8, this might send explicit cleanup requests.
        self.logger.debug(f"Session close requested for: {session_id}")

        # Always return True for Phase 6 - cleanup is automatic
        return True

    def __repr__(self) -> str:
        """String representation for debugging."""
        return (
            f"FullMCPProxy(endpoint='{self.endpoint}', function='{self.function_name}')"
        )
320
-
321
-
322
class EnhancedFullMCPProxy(FullMCPProxy):
    """Enhanced Full MCP proxy with streaming auto-configuration and advanced features.

    Auto-configures based on kwargs from @mesh.tool decorator:
    - streaming: Enable/disable streaming responses
    - stream_timeout: Timeout for streaming requests
    - buffer_size: Streaming buffer size
    - Plus all EnhancedMCPClientProxy features (timeout, retries, headers, auth)
    """

    def __init__(
        self, endpoint: str, function_name: str, kwargs_config: Optional[dict] = None
    ):
        """Initialize Enhanced Full MCP Proxy.

        Args:
            endpoint: Base URL of the remote MCP service
            function_name: Specific tool function to call
            kwargs_config: Optional kwargs configuration from @mesh.tool decorator
        """
        super().__init__(endpoint, function_name, kwargs_config)
        self.kwargs_config = kwargs_config or {}

        # Configure streaming from kwargs (logs via the parent-assigned logger)
        self._configure_from_kwargs()

        self.logger = logger.getChild(f"enhanced_full_proxy.{function_name}")

    def _configure_from_kwargs(self):
        """Auto-configure proxy settings from kwargs supplied by @mesh.tool."""
        # Basic configuration (inherited from EnhancedMCPClientProxy concepts)
        self.timeout = self.kwargs_config.get("timeout", 30)
        self.retry_count = self.kwargs_config.get("retry_count", 1)
        self.retry_delay = self.kwargs_config.get("retry_delay", 1.0)
        self.retry_backoff = self.kwargs_config.get("retry_backoff", 2.0)
        self.custom_headers = self.kwargs_config.get("custom_headers", {})
        self.auth_required = self.kwargs_config.get("auth_required", False)

        # Streaming-specific configuration
        self.streaming_capable = self.kwargs_config.get("streaming", False)
        self.stream_timeout = self.kwargs_config.get("stream_timeout", 300)  # 5 minutes
        self.buffer_size = self.kwargs_config.get("buffer_size", 4096)

        # Session management configuration
        self.session_required = self.kwargs_config.get("session_required", False)
        self.stateful = self.kwargs_config.get("stateful", False)
        self.auto_session_management = self.kwargs_config.get(
            "auto_session_management", True
        )  # Enable by default
        self._current_session_id = None  # Track current session for auto-management

        # Content handling
        self.accepted_content_types = self.kwargs_config.get(
            "accepts", ["application/json"]
        )
        self.default_content_type = self.kwargs_config.get(
            "content_type", "application/json"
        )
        self.max_response_size = self.kwargs_config.get(
            "max_response_size", 10 * 1024 * 1024
        )  # 10MB

        self.logger.info(
            f"🔧 Enhanced Full MCP proxy configured - timeout: {self.timeout}s, "
            f"retries: {self.retry_count}, streaming: {self.streaming_capable}, "
            f"stream_timeout: {self.stream_timeout}s, session_required: {self.session_required}, "
            f"auto_session_management: {self.auto_session_management}"
        )

    def call_tool_auto(self, name: str, arguments: Optional[dict] = None) -> Any:
        """Automatically choose streaming vs non-streaming and handle sessions based on configuration.

        Note: intentionally synchronous — returns either an async generator
        (streaming) or a coroutine (non-streaming) for the caller to consume.
        """
        # Handle automatic session management if required
        if self.session_required and self.auto_session_management:
            return self._call_with_auto_session(name, arguments)

        # Regular non-session calls
        if self.streaming_capable:
            # Return async generator for streaming
            return self.call_tool_streaming_enhanced(name, arguments)
        else:
            # Return coroutine for regular async call
            return self.call_tool_enhanced(name, arguments)

    async def _call_with_auto_session(
        self, name: str, arguments: Optional[dict] = None
    ) -> Any:
        """Automatically manage session creation and cleanup for session-required calls."""
        # Create session if we don't have one
        if not self._current_session_id:
            self._current_session_id = await self.create_session()
            self.logger.info(f"🎯 Auto-created session: {self._current_session_id}")

        try:
            # Make the call with session
            if self.streaming_capable:
                # For streaming calls, we need to handle session in headers
                # Note: call_with_session doesn't support streaming yet, so fall back to enhanced call
                self.logger.debug(
                    "🌊 Session-required streaming call - using enhanced streaming with session headers"
                )
                return self.call_tool_streaming_enhanced(
                    name, arguments, session_id=self._current_session_id
                )
            else:
                # Use the existing session-aware method
                # call_with_session expects function arguments as kwargs, not the function name
                function_args = arguments or {}
                result = await self.call_with_session(
                    session_id=self._current_session_id, **function_args
                )
                return result

        except Exception as e:
            self.logger.error(f"❌ Auto-session call failed: {e}")
            # Clean up session on failure
            if self._current_session_id:
                try:
                    await self.close_session(self._current_session_id)
                    self._current_session_id = None
                    self.logger.info("🧹 Cleaned up failed session")
                except Exception as cleanup_error:
                    self.logger.warning(f"⚠️ Session cleanup failed: {cleanup_error}")
            raise

    async def call_tool_enhanced(self, name: str, arguments: Optional[dict] = None) -> Any:
        """Enhanced non-streaming tool call with retry logic and custom configuration.

        Retries up to self.retry_count times with exponential backoff
        (retry_delay * retry_backoff ** attempt) before re-raising the last error.
        """
        last_exception = None

        for attempt in range(self.retry_count + 1):
            try:
                return await self._make_enhanced_request(name, arguments or {})

            except Exception as e:
                last_exception = e

                if attempt < self.retry_count:
                    # Calculate retry delay with backoff
                    delay = self.retry_delay * (self.retry_backoff**attempt)

                    self.logger.warning(
                        f"🔄 Request failed (attempt {attempt + 1}/{self.retry_count + 1}), "
                        f"retrying in {delay:.1f}s: {str(e)}"
                    )

                    await asyncio.sleep(delay)
                else:
                    self.logger.error(
                        f"❌ All {self.retry_count + 1} attempts failed for {name}"
                    )

        raise last_exception

    async def _make_enhanced_request(self, name: str, arguments: dict) -> Any:
        """Make enhanced MCP request with custom headers and configuration.

        Raises:
            ImportError: If httpx is not installed (no sync fallback here).
            Exception: On timeout, connection failure, or MCP-level errors.
        """
        import os

        # Import httpx BEFORE the try-block below. Previously the import was
        # inside the try whose handlers name httpx.TimeoutException; with
        # httpx missing, evaluating that except clause raised NameError
        # instead of a clear ImportError.
        import httpx

        payload = {
            "jsonrpc": "2.0",
            "id": str(uuid.uuid4()),
            "method": "tools/call",
            "params": {"name": name, "arguments": arguments},
        }

        # Build headers with custom configuration
        headers = {
            "Content-Type": self.default_content_type,
            "Accept": ", ".join(self.accepted_content_types),
        }

        # Add custom headers
        headers.update(self.custom_headers)

        # Add authentication headers if required
        if self.auth_required:
            auth_token = os.getenv("MCP_MESH_AUTH_TOKEN")
            if auth_token:
                headers["Authorization"] = f"Bearer {auth_token}"
            else:
                self.logger.warning("⚠️ Authentication required but no token available")

        # Inject trace context headers
        headers = self._inject_trace_headers(headers)

        url = f"{self.endpoint}/mcp"

        try:
            # Use configured timeout
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.post(url, json=payload, headers=headers)

                # Check response size
                content_length = response.headers.get("content-length")
                if content_length and int(content_length) > self.max_response_size:
                    raise ValueError(
                        f"Response too large: {content_length} bytes > {self.max_response_size}"
                    )

                response.raise_for_status()

                result = response.json()
                if "error" in result:
                    raise Exception(f"MCP request failed: {result['error']}")

                # Apply existing content extraction
                from ..shared.content_extractor import ContentExtractor

                return ContentExtractor.extract_content(result.get("result"))

        except httpx.TimeoutException:
            raise Exception(f"Request timeout after {self.timeout}s")
        except httpx.ConnectError as e:
            raise Exception(f"Connection failed: {str(e)}")
        except Exception as e:
            self.logger.error(f"Enhanced request failed: {e}")
            raise

    async def call_tool_streaming_enhanced(
        self, name: str, arguments: Optional[dict] = None, session_id: Optional[str] = None
    ) -> AsyncIterator[dict]:
        """Enhanced streaming with auto-configuration and retry logic."""
        if not self.streaming_capable:
            raise ValueError(
                f"Tool {name} not configured for streaming (streaming=False in kwargs)"
            )

        async for chunk in self._make_streaming_request_enhanced(
            name, arguments or {}, session_id=session_id
        ):
            yield chunk

    async def _make_streaming_request_enhanced(
        self, name: str, arguments: dict, session_id: Optional[str] = None
    ) -> AsyncIterator[dict]:
        """Make enhanced streaming request with kwargs configuration."""
        import os

        # Imported before the try-block for the same NameError-in-except
        # reason as _make_enhanced_request.
        import httpx

        payload = {
            "jsonrpc": "2.0",
            "id": str(uuid.uuid4()),
            "method": "tools/call",
            "params": {"name": name, "arguments": arguments},
        }

        headers = {"Content-Type": "application/json", "Accept": "text/event-stream"}

        # Add custom headers
        headers.update(self.custom_headers)

        # Add authentication headers if required
        if self.auth_required:
            auth_token = os.getenv("MCP_MESH_AUTH_TOKEN")
            if auth_token:
                headers["Authorization"] = f"Bearer {auth_token}"

        # Add session ID header if provided
        if session_id:
            headers["X-Session-ID"] = session_id

        # Inject trace context headers
        headers = self._inject_trace_headers(headers)

        url = f"{self.endpoint}/mcp"

        try:
            # Use stream-specific timeout
            async with httpx.AsyncClient(timeout=self.stream_timeout) as client:
                async with client.stream(
                    "POST", url, json=payload, headers=headers
                ) as response:
                    response.raise_for_status()

                    # Use shared SSE stream processor
                    sse_processor = SSEStreamProcessor(f"EnhancedFullMCPProxy.{name}")

                    async for chunk_bytes in response.aiter_bytes(
                        max(self.buffer_size, 8192)
                    ):
                        for chunk in sse_processor.process_chunk(chunk_bytes):
                            yield chunk

                    # Process any remaining buffered data
                    for chunk in sse_processor.finalize():
                        yield chunk

        except httpx.TimeoutException:
            raise Exception(f"Streaming timeout after {self.stream_timeout}s")
        except Exception as e:
            self.logger.error(f"Enhanced streaming request failed: {e}")
            raise

    async def cleanup_auto_session(self):
        """Clean up automatically created session."""
        if self._current_session_id and self.auto_session_management:
            try:
                await self.close_session(self._current_session_id)
                self.logger.info(
                    f"🧹 Auto-session cleaned up: {self._current_session_id}"
                )
                self._current_session_id = None
            except Exception as e:
                self.logger.warning(f"⚠️ Failed to cleanup auto-session: {e}")

    def __del__(self):
        """Cleanup on object destruction (best-effort safety net).

        Async cleanup in __del__ is inherently unreliable; we only schedule
        the cleanup task when a loop is actually running. get_running_loop()
        replaces the deprecated get_event_loop(), which could create a brand
        new loop when called from a finalizer outside any running loop.
        """
        if getattr(self, "_current_session_id", None):
            try:
                loop = asyncio.get_running_loop()
                if not loop.is_closed():
                    loop.create_task(self.cleanup_auto_session())
            except Exception:
                # Silent failure in destructor
                pass