mcp-mesh 0.4.0__py3-none-any.whl → 0.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
_mcp_mesh/__init__.py CHANGED
@@ -31,7 +31,7 @@ from .engine.decorator_registry import (
     get_decorator_stats,
 )
 
-__version__ = "0.4.0"
+__version__ = "0.4.2"
 
 # Store reference to runtime processor if initialized
 _runtime_processor = None
_mcp_mesh/engine/async_mcp_client.py CHANGED
@@ -6,6 +6,8 @@ import urllib.error
 import urllib.request
 from typing import Any
 
+from ..shared.sse_parser import SSEParser
+
 logger = logging.getLogger(__name__)
 
 
@@ -62,25 +64,10 @@ class AsyncMCPClient:
 
         response_text = response.text
 
-        # Handle Server-Sent Events format from FastMCP
-        if response_text.startswith("event:"):
-            # Parse SSE format: extract JSON from "data:" lines
-            json_data = None
-            for line in response_text.split("\n"):
-                if line.startswith("data:"):
-                    json_str = line[5:].strip()  # Remove 'data:' prefix
-                    try:
-                        json_data = json.loads(json_str)
-                        break
-                    except json.JSONDecodeError:
-                        continue
-
-            if json_data is None:
-                raise RuntimeError("Could not parse SSE response from FastMCP")
-            data = json_data
-        else:
-            # Plain JSON response
-            data = response.json()
+        # Use shared SSE parser
+        data = SSEParser.parse_sse_response(
+            response_text, f"AsyncMCPClient.{self.endpoint}"
+        )
 
         # Check for JSON-RPC error
         if "error" in data:
_mcp_mesh/engine/full_mcp_proxy.py CHANGED
@@ -7,6 +7,7 @@ import uuid
 from collections.abc import AsyncIterator
 from typing import Any, Optional
 
+from ..shared.sse_parser import SSEStreamProcessor
 from .async_mcp_client import AsyncMCPClient
 from .mcp_client_proxy import MCPClientProxy
 
@@ -103,15 +104,18 @@ class FullMCPProxy(MCPClientProxy):
                 if response.status_code >= 400:
                     raise RuntimeError(f"HTTP error {response.status_code}")
 
-                async for line in response.aiter_lines():
-                    if line.startswith("data: "):
-                        try:
-                            data_str = line[6:]  # Remove "data: " prefix
-                            if data_str.strip():
-                                chunk = json.loads(data_str)
-                                yield chunk
-                        except json.JSONDecodeError:
-                            continue
+                # Use shared SSE stream processor
+                sse_processor = SSEStreamProcessor(f"FullMCPProxy.{name}")
+
+                async for chunk_bytes in response.aiter_bytes(8192):
+                    chunks = sse_processor.process_chunk(chunk_bytes)
+                    for chunk in chunks:
+                        yield chunk
+
+                # Process any remaining data
+                final_chunks = sse_processor.finalize()
+                for chunk in final_chunks:
+                    yield chunk
 
         except ImportError:
             # Fallback: if httpx not available, use sync call
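
A minimal sketch (illustrative input, not from the package) of the buffering behaviour the shared processor adds over the old line-by-line parsing: an SSE event split across aiter_bytes() chunks is held in the buffer until the blank line that terminates it, and finalize() drains whatever remains:

    from _mcp_mesh.shared.sse_parser import SSEStreamProcessor

    proc = SSEStreamProcessor("example")

    # One SSE event arriving in two network chunks; nothing is yielded
    # until the terminating blank line ("\n\n") shows up.
    assert proc.process_chunk(b'event: message\ndata: {"par') == []
    assert proc.process_chunk(b'tial": false}\n\n') == [{"partial": False}]
    assert proc.finalize() == []  # buffer already drained
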
@@ -588,19 +592,20 @@ class EnhancedFullMCPProxy(FullMCPProxy):
             ) as response:
                 response.raise_for_status()
 
-                buffer = ""
-                async for chunk in response.aiter_bytes(self.buffer_size):
-                    buffer += chunk.decode("utf-8")
+                # Use shared SSE stream processor
+                sse_processor = SSEStreamProcessor(f"EnhancedFullMCPProxy.{name}")
 
-                    while "\n" in buffer:
-                        line, buffer = buffer.split("\n", 1)
+                async for chunk_bytes in response.aiter_bytes(
+                    max(self.buffer_size, 8192)
+                ):
+                    chunks = sse_processor.process_chunk(chunk_bytes)
+                    for chunk in chunks:
+                        yield chunk
 
-                        if line.startswith("data: "):
-                            try:
-                                data = json.loads(line[6:])
-                                yield data
-                            except json.JSONDecodeError:
-                                continue
+                # Process any remaining data
+                final_chunks = sse_processor.finalize()
+                for chunk in final_chunks:
+                    yield chunk
 
         except httpx.TimeoutException:
             raise Exception(f"Streaming timeout after {self.stream_timeout}s")
_mcp_mesh/engine/mcp_client_proxy.py CHANGED
@@ -10,6 +10,7 @@ import uuid
 from typing import Any, Optional
 
 from ..shared.content_extractor import ContentExtractor
+from ..shared.sse_parser import SSEParser
 from .async_mcp_client import AsyncMCPClient
 
 logger = logging.getLogger(__name__)
@@ -113,25 +114,10 @@ class MCPClientProxy:
         with urllib.request.urlopen(req, timeout=30.0) as response:
             response_data = response.read().decode("utf-8")
 
-            # Handle Server-Sent Events format from FastMCP
-            if response_data.startswith("event:"):
-                # Parse SSE format: extract JSON from "data:" lines
-                json_data = None
-                for line in response_data.split("\n"):
-                    if line.startswith("data:"):
-                        json_str = line[5:].strip()  # Remove 'data:' prefix
-                        try:
-                            json_data = json.loads(json_str)
-                            break
-                        except json.JSONDecodeError:
-                            continue
-
-                if json_data is None:
-                    raise RuntimeError("Could not parse SSE response from FastMCP")
-                data = json_data
-            else:
-                # Plain JSON response
-                data = json.loads(response_data)
+            # Use shared SSE parser
+            data = SSEParser.parse_sse_response(
+                response_data, f"MCPClientProxy.{self.function_name}"
+            )
 
             # Check for JSON-RPC error
             if "error" in data:
_mcp_mesh/pipeline/startup/fastmcpserver_discovery.py CHANGED
@@ -41,7 +41,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
         server_info = []
         total_registered_functions = 0
 
-        for server_name, server_instance in discovered_servers.items():
+        for server_name, server_instance in list(discovered_servers.items()):
             info = self._extract_server_info(server_name, server_instance)
             server_info.append(info)
             total_registered_functions += info.get("function_count", 0)
@@ -119,7 +119,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
             "pkgutil",
         }
 
-        for module_name, module in sys.modules.items():
+        for module_name, module in list(sys.modules.items()):
             if (
                 module
                 and not module_name.startswith("_")
@@ -166,7 +166,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
         module_globals = vars(module)
         # Only log if we find FastMCP instances to reduce noise
 
-        for var_name, var_value in module_globals.items():
+        for var_name, var_value in list(module_globals.items()):
             if self._is_fastmcp_instance(var_value):
                 instance_key = f"{module_name}.{var_name}"
                 found[instance_key] = var_value
@@ -230,7 +230,7 @@ class FastMCPServerDiscoveryStep(PipelineStep):
             info["function_count"] += len(tools)
 
             self.logger.debug(f"Server '{server_name}' has {len(tools)} tools:")
-            for tool_name, tool in tools.items():
+            for tool_name, tool in list(tools.items()):
                 function_ptr = getattr(tool, "fn", None)
                 self.logger.debug(f" - {tool_name}: {function_ptr}")
 
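The list(...) wrappers above guard against dictionaries that mutate while being iterated, which is a real risk for sys.modules and live registries during discovery. A standalone illustration (not code from the package):

    # A dict that grows while its items() view is being iterated raises RuntimeError.
    registry = {"a": 1, "b": 2}

    try:
        for name, value in registry.items():
            registry[f"{name}_copy"] = value  # mutation mid-iteration
    except RuntimeError as exc:
        print(exc)  # dictionary changed size during iteration

    # list() snapshots the entries first, so additions during the loop are safe.
    for name, value in list(registry.items()):
        registry[f"{name}_snapshot"] = value
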
_mcp_mesh/shared/sse_parser.py ADDED
@@ -0,0 +1,217 @@
+"""Server-Sent Events (SSE) parsing utilities for MCP responses."""
+
+import json
+import logging
+from typing import Any, Dict, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class SSEParser:
+    """Utility class for parsing Server-Sent Events responses from FastMCP servers.
+
+    Handles the common issue where large JSON responses get split across multiple
+    SSE 'data:' lines, which would cause JSON parsing failures if processed line-by-line.
+    """
+
+    @staticmethod
+    def parse_sse_response(
+        response_text: str, context: str = "unknown"
+    ) -> dict[str, Any]:
+        """
+        Parse SSE response text and extract JSON data.
+
+        Handles multi-line JSON responses by accumulating all 'data:' lines
+        before attempting to parse JSON.
+
+        Args:
+            response_text: Raw SSE response text
+            context: Context string for error logging
+
+        Returns:
+            Parsed JSON data as dictionary
+
+        Raises:
+            RuntimeError: If SSE response cannot be parsed
+        """
+        logger.debug(f"🔧 SSEParser.parse_sse_response called from {context}")
+        logger.debug(
+            f"🔧 Response text length: {len(response_text)}, starts with 'event:': {response_text.startswith('event:')}"
+        )
+        logger.debug(f"🔧 Response preview: {repr(response_text[:100])}...")
+
+        # Check if this is SSE format (can be malformed and not start with "event:")
+        is_sse_format = (
+            response_text.startswith("event:")
+            or "event: message" in response_text
+            or "data: " in response_text
+        )
+
+        if not is_sse_format:
+            # Not an SSE response, try parsing as plain JSON
+            logger.debug(f"🔧 {context}: Parsing as plain JSON (not SSE format)")
+            logger.debug(
+                f"🔧 {context}: Response preview: {repr(response_text[:200])}..."
+            )
+            try:
+                result = json.loads(response_text)
+                logger.debug(f"🔧 {context}: Plain JSON parsed successfully")
+                return result
+            except json.JSONDecodeError as e:
+                logger.error(f"🔧 {context}: Plain JSON parse failed: {e}")
+                logger.error(
+                    f"🔧 {context}: Invalid response content (first 500 chars): {repr(response_text[:500])}"
+                )
+                raise RuntimeError(f"Invalid JSON response in {context}: {e}")
+
+        # Parse SSE format: find first valid JSON in data lines
+        logger.debug(f"🔧 {context}: Parsing SSE format - looking for first valid JSON")
+        data_line_count = 0
+        first_valid_json = None
+
+        for line in response_text.split("\n"):
+            if line.startswith("data:"):
+                data_content = line[5:].strip()  # Remove 'data:' prefix and whitespace
+                if data_content:
+                    data_line_count += 1
+                    try:
+                        # Try to parse this line as JSON
+                        parsed_json = json.loads(data_content)
+                        if first_valid_json is None:
+                            first_valid_json = parsed_json
+                            logger.debug(f"🔧 {context}: Found first valid JSON in data line {data_line_count}")
+                    except json.JSONDecodeError:
+                        # Skip invalid JSON lines - this is expected behavior
+                        logger.debug(f"🔧 {context}: Skipping invalid JSON in data line {data_line_count}: {data_content[:50]}...")
+                        continue
+
+        logger.debug(
+            f"🔧 {context}: Processed {data_line_count} data lines"
+        )
+
+        # Return first valid JSON found
+        if first_valid_json is None:
+            logger.error(f"🔧 {context}: No valid JSON found in SSE response")
+            raise RuntimeError(f"Could not parse SSE response from FastMCP")
+
+        logger.debug(
+            f"🔧 {context}: SSE parsing successful! Result type: {type(first_valid_json)}"
+        )
+        return first_valid_json
+
+    @staticmethod
+    def parse_streaming_sse_chunk(chunk_data: str) -> Optional[dict[str, Any]]:
+        """
+        Parse a single streaming SSE chunk.
+
+        Used for processing individual chunks in streaming responses.
+
+        Args:
+            chunk_data: Single data line content (without 'data:' prefix)
+
+        Returns:
+            Parsed JSON if valid and complete, None if should be skipped
+        """
+        if not chunk_data.strip():
+            return None
+
+        # Quick validation for complete JSON structures
+        chunk_data = chunk_data.strip()
+
+        # Must be complete JSON structures
+        if (
+            (chunk_data.startswith("{") and not chunk_data.endswith("}"))
+            or (chunk_data.startswith("[") and not chunk_data.endswith("]"))
+            or (chunk_data.startswith('"') and not chunk_data.endswith('"'))
+        ):
+            # Incomplete JSON structure - should be accumulated elsewhere
+            return None
+
+        try:
+            return json.loads(chunk_data)
+        except json.JSONDecodeError:
+            # Invalid JSON - skip this chunk
+            return None
+
+
+class SSEStreamProcessor:
+    """Processor for streaming SSE responses with proper buffering."""
+
+    def __init__(self, context: str = "streaming"):
+        self.context = context
+        self.buffer = ""
+        self.logger = logger.getChild(f"sse_stream.{context}")
+
+    def process_chunk(self, chunk_bytes: bytes) -> list[dict[str, Any]]:
+        """
+        Process a chunk of bytes and return any complete JSON objects found.
+
+        Args:
+            chunk_bytes: Raw bytes from streaming response
+
+        Returns:
+            List of complete JSON objects found in this chunk
+        """
+        self.logger.debug(
+            f"🌊 SSEStreamProcessor.process_chunk called for {self.context}, chunk size: {len(chunk_bytes)}"
+        )
+
+        try:
+            chunk_text = chunk_bytes.decode("utf-8")
+            self.buffer += chunk_text
+            self.logger.debug(
+                f"🌊 {self.context}: Buffer size after chunk: {len(self.buffer)}"
+            )
+        except UnicodeDecodeError:
+            self.logger.warning(
+                f"🌊 {self.context}: Skipping chunk with unicode decode error"
+            )
+            return []
+
+        results = []
+        events_processed = 0
+
+        # Process complete SSE events (end with \n\n)
+        while True:
+            event_end = self.buffer.find("\n\n")
+            if event_end == -1:
+                break  # No complete event yet
+
+            event_block = self.buffer[:event_end]
+            self.buffer = self.buffer[event_end + 2 :]  # Remove processed event
+            events_processed += 1
+
+            # Extract data from SSE event
+            for line in event_block.split("\n"):
+                if line.startswith("data: "):
+                    data_str = line[6:].strip()  # Remove "data: " prefix
+                    if data_str:
+                        parsed = SSEParser.parse_streaming_sse_chunk(data_str)
+                        if parsed:
+                            results.append(parsed)
+
+        self.logger.debug(
+            f"🌊 {self.context}: Processed {events_processed} complete SSE events, yielding {len(results)} JSON objects"
+        )
+        return results
+
+    def finalize(self) -> list[dict[str, Any]]:
+        """
+        Process any remaining data in buffer.
+
+        Returns:
+            List of any final JSON objects found
+        """
+        results = []
+
+        if self.buffer.strip():
+            for line in self.buffer.split("\n"):
+                if line.startswith("data: "):
+                    data_str = line[6:].strip()
+                    if data_str:
+                        parsed = SSEParser.parse_streaming_sse_chunk(data_str)
+                        if parsed:
+                            results.append(parsed)
+
+        self.buffer = ""  # Clear buffer
+        return results
mcp_mesh-0.4.0.dist-info/METADATA → mcp_mesh-0.4.2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-mesh
-Version: 0.4.0
+Version: 0.4.2
 Summary: Kubernetes-native platform for distributed MCP applications
 Project-URL: Homepage, https://github.com/dhyansraj/mcp-mesh
 Project-URL: Documentation, https://github.com/dhyansraj/mcp-mesh/tree/main/docs
mcp_mesh-0.4.0.dist-info/RECORD → mcp_mesh-0.4.2.dist-info/RECORD
@@ -1,11 +1,11 @@
-_mcp_mesh/__init__.py,sha256=PNSqEqZWQkoWcxSu81zaMowsyvY9td88B1VWLMjTDoc,2103
+_mcp_mesh/__init__.py,sha256=4YFRyw3RwxKNKMPP9rmT4enhGSe8ceAG8AgOKbU5ITY,2103
 _mcp_mesh/engine/__init__.py,sha256=o4axdnjeUnUTsukZFacMNnxd6LRX2D46qJzMzUfEZUI,3259
-_mcp_mesh/engine/async_mcp_client.py,sha256=uD3s9dDWNQ9SZiuiQm0C6BSjtgn1OqgMnh3oc_D6Zz4,7015
+_mcp_mesh/engine/async_mcp_client.py,sha256=IdcCsswl1WXnzIWGb686Morgjf4CROm411EiDeE0cac,6308
 _mcp_mesh/engine/decorator_registry.py,sha256=w_-10yBkgYAu_u-SUrTaZcCOv1t_MqlHPg0vFQGdy2I,15506
 _mcp_mesh/engine/dependency_injector.py,sha256=RbXa9ap1PYRc7G3-ZnJkWQOcibQfecQDGIU-_7O2Raw,17310
-_mcp_mesh/engine/full_mcp_proxy.py,sha256=M6XH5aVPwbZfaRIMY7DPJWm3T-8cmA6maP9fdMrakr0,25134
+_mcp_mesh/engine/full_mcp_proxy.py,sha256=fJyU5mbmvqDXaSgnT00eVb9EnumTHP2QMwAPB_IfIQc,25301
 _mcp_mesh/engine/http_wrapper.py,sha256=NFKCcUPwSLeyLuTc1TwWpBRrlsbgkzBoiZiZixhOias,21453
-_mcp_mesh/engine/mcp_client_proxy.py,sha256=IzWE5uu8rRbaJvDcQvUk6hqeI35Mm5KVSBmJQ1g0Ubo,18336
+_mcp_mesh/engine/mcp_client_proxy.py,sha256=RpHdwCjC6mwiZ5idv7h8OchFRbufyC2NLaHxzU2kKyo,17623
 _mcp_mesh/engine/self_dependency_proxy.py,sha256=OkKt0-B_ADnJlWtHiHItoZCBZ7Su0iz2unEPFfXvrs4,3302
 _mcp_mesh/engine/session_aware_client.py,sha256=mc9eh-aCvUvfllORiXTf_X8_jPqV-32QdWKlr8tHLkU,10600
 _mcp_mesh/engine/session_manager.py,sha256=MCr0_fXBaUjXM51WU5EhDkiGvBdfzYQFVNb9DCXXL0A,10418
@@ -70,7 +70,7 @@ _mcp_mesh/pipeline/startup/__init__.py,sha256=_Y25DgT9eYJ_7Qe7x1Z7y4VFUIaFEmCBS9
 _mcp_mesh/pipeline/startup/configuration.py,sha256=6LRLIxrqFMU76qrBb6GjGknUlKPZZ9iqOlxE7F9ZhLs,2808
 _mcp_mesh/pipeline/startup/decorator_collection.py,sha256=RHC6MHtfP9aP0hZ-IJjISZu72e0Pml3LU0qr7dc284w,2294
 _mcp_mesh/pipeline/startup/fastapiserver_setup.py,sha256=pQIrEc70qeWP732S9LF5A2qWq0CZ5wK-bxls7AjX_2o,33346
-_mcp_mesh/pipeline/startup/fastmcpserver_discovery.py,sha256=fl-ZvCFySfs49NF6DQO1U-msNPcs2NBIzDI1UWQYiEc,10246
+_mcp_mesh/pipeline/startup/fastmcpserver_discovery.py,sha256=ktsE9EZYdyZbCtCKB6HVdzGFMQ0E9n0-7I55LRO99sE,10270
 _mcp_mesh/pipeline/startup/heartbeat_loop.py,sha256=0IX2Q-OJvTVsNILUKxl7v3-BM8DdAW3UB-W-Z2gAWrc,3861
 _mcp_mesh/pipeline/startup/heartbeat_preparation.py,sha256=v3Fl0PvW5s7Ib_Cy7WtXA7gDvsFGiz54a-IlQRTcLPg,10410
 _mcp_mesh/pipeline/startup/startup_orchestrator.py,sha256=r0po5cQKe9F5nO6ey5PmYGjYKHe5u8pNEFy7Da0Lh5w,17835
@@ -83,16 +83,17 @@ _mcp_mesh/shared/fast_heartbeat_status.py,sha256=OquEsX9ZTbxY1lIsll0Mbb2KDzSJD76
 _mcp_mesh/shared/host_resolver.py,sha256=ycs6gXnI1zJX5KiqiLJPX5GkHX8r4j8NMHQOlG2J2X8,2964
 _mcp_mesh/shared/logging_config.py,sha256=m_eW5ub01mjjoLdF55lv9JwSK9y52rqpVTRN8jNz_6s,2381
 _mcp_mesh/shared/registry_client_wrapper.py,sha256=d8yL-MiCrQr_WYdRFStOd531qaLv9kZjh0zJAmCJ-Cc,16976
+_mcp_mesh/shared/sse_parser.py,sha256=OEPnfL9xL3rsjQrbyvfUO82WljPSDeO6Z61uUwN1NAo,8035
 _mcp_mesh/shared/support_types.py,sha256=k-ICF_UwDkHxQ1D5LwFZrp-UrNb4E5dzw02CRuLW9iI,7264
 _mcp_mesh/tracing/agent_context_helper.py,sha256=XR3OyVT40lWyXaSWYBWdOhs0ZqTj3Xa49BSG8EUzrjI,4595
 _mcp_mesh/tracing/context.py,sha256=yZtTZXVKltX2BUBrWeeNfQ8Y4n6L_2ywFVlFP-wU9og,2353
 _mcp_mesh/tracing/execution_tracer.py,sha256=rAyAhSMy7Mr5KpZcQUfDcQ5QW8bj-g-H6wVB3tvkkR4,6852
 _mcp_mesh/tracing/redis_metadata_publisher.py,sha256=5U5GojBFYZdxJJ1RvmdxXHC-VuPQz4kMla6n0xdZxKc,5371
 _mcp_mesh/tracing/trace_context_helper.py,sha256=xPDygKrXcoJbrr7013Av_I0ajammWEq68ihdP7DZ-Zo,6072
-mesh/__init__.py,sha256=yU-xxDxSBNW2eVXIo0DazPIctkkUc-YGLV0k-liPD4g,3108
+mesh/__init__.py,sha256=XtquSwwLeVJwQZjg8jO4_vRNSQZtWo_Mv2tixI1_Yzg,3171
 mesh/decorators.py,sha256=_WLqfMXQgi_4Xarci-sRa3zxqKWIf-2HYELh9MAitYY,18397
 mesh/types.py,sha256=cR8oRKHZjZXNkbHPYdRfLNDyCE4yEz_Fbrmdc08NY6k,8775
-mcp_mesh-0.4.0.dist-info/METADATA,sha256=xKBoxssSk35hneJgSRyztXSIH-Ei3OHPtMixJsB4nqs,4879
-mcp_mesh-0.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-mcp_mesh-0.4.0.dist-info/licenses/LICENSE,sha256=_EBQHRQThv9FPOLc5eFOUdeeRO0mYwChC7cx60dM1tM,1078
-mcp_mesh-0.4.0.dist-info/RECORD,,
+mcp_mesh-0.4.2.dist-info/METADATA,sha256=clX_rcn6hz6Q3wSbA4Wx2fkqNSm4LWmbOVL0nhojKA8,4879
+mcp_mesh-0.4.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+mcp_mesh-0.4.2.dist-info/licenses/LICENSE,sha256=_EBQHRQThv9FPOLc5eFOUdeeRO0mYwChC7cx60dM1tM,1078
+mcp_mesh-0.4.2.dist-info/RECORD,,
mesh/__init__.py CHANGED
@@ -19,7 +19,7 @@ Use 'import mesh' and then '@mesh.tool()' for consistency with MCP patterns.
 """
 
 from . import decorators
-from .types import McpMeshAgent
+from .types import McpAgent, McpMeshAgent
 
 __version__ = "1.0.0"
 
@@ -95,6 +95,8 @@ def __getattr__(name):
         return decorators.agent
     elif name == "McpMeshAgent":
        return McpMeshAgent
+    elif name == "McpAgent":
+        return McpAgent
    elif name == "create_server":
        return create_server
    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")