chuk-tool-processor 0.4-py3-none-any.whl → 0.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (29)
  1. chuk_tool_processor/core/processor.py +1 -1
  2. chuk_tool_processor/execution/strategies/inprocess_strategy.py +30 -9
  3. chuk_tool_processor/execution/strategies/subprocess_strategy.py +139 -97
  4. chuk_tool_processor/execution/tool_executor.py +7 -7
  5. chuk_tool_processor/execution/wrappers/caching.py +3 -3
  6. chuk_tool_processor/execution/wrappers/retry.py +163 -174
  7. chuk_tool_processor/logging/__init__.py +12 -0
  8. chuk_tool_processor/logging/context.py +110 -7
  9. chuk_tool_processor/mcp/mcp_tool.py +148 -40
  10. chuk_tool_processor/mcp/register_mcp_tools.py +3 -3
  11. chuk_tool_processor/mcp/setup_mcp_sse.py +4 -4
  12. chuk_tool_processor/mcp/setup_mcp_stdio.py +2 -2
  13. chuk_tool_processor/mcp/stream_manager.py +71 -15
  14. chuk_tool_processor/mcp/transport/base_transport.py +2 -2
  15. chuk_tool_processor/mcp/transport/sse_transport.py +7 -19
  16. chuk_tool_processor/mcp/transport/stdio_transport.py +7 -9
  17. chuk_tool_processor/models/validated_tool.py +6 -6
  18. chuk_tool_processor/plugins/discovery.py +3 -3
  19. chuk_tool_processor/plugins/parsers/base.py +1 -1
  20. chuk_tool_processor/plugins/parsers/xml_tool.py +2 -2
  21. chuk_tool_processor/registry/auto_register.py +5 -5
  22. chuk_tool_processor/registry/decorators.py +278 -64
  23. chuk_tool_processor/registry/interface.py +2 -2
  24. chuk_tool_processor/registry/providers/memory.py +2 -2
  25. chuk_tool_processor/utils/validation.py +1 -1
  26. {chuk_tool_processor-0.4.dist-info → chuk_tool_processor-0.5.dist-info}/METADATA +2 -3
  27. {chuk_tool_processor-0.4.dist-info → chuk_tool_processor-0.5.dist-info}/RECORD +29 -29
  28. {chuk_tool_processor-0.4.dist-info → chuk_tool_processor-0.5.dist-info}/WHEEL +0 -0
  29. {chuk_tool_processor-0.4.dist-info → chuk_tool_processor-0.5.dist-info}/top_level.txt +0 -0
chuk_tool_processor/mcp/mcp_tool.py
@@ -3,6 +3,8 @@
 """
 MCP tool shim that delegates execution to a StreamManager,
 handling its own lazy bootstrap when needed.
+
+FIXED: Added subprocess serialization support by implementing __getstate__ and __setstate__
 """
 from __future__ import annotations
 
@@ -24,23 +26,32 @@ class MCPTool:
 
     If no ``StreamManager`` is supplied the class will start one on first
     use via ``setup_mcp_stdio``.
+
+    FIXED: Added serialization support for subprocess execution.
     """
 
     # ------------------------------------------------------------------ #
     def __init__(
         self,
-        tool_name: str,
+        tool_name: str = "",
         stream_manager: Optional[StreamManager] = None,
         *,
         cfg_file: str = "",
        servers: Optional[List[str]] = None,
         server_names: Optional[Dict[int, str]] = None,
         namespace: str = "stdio",
-        default_timeout: Optional[float] = None,  # Add default timeout support
+        default_timeout: Optional[float] = None
     ) -> None:
+        if not tool_name:
+            raise ValueError(
+                "MCPTool requires a tool_name. "
+                "This error usually occurs during subprocess serialization. "
+                "Make sure the tool is properly registered with a name."
+            )
+
         self.tool_name = tool_name
         self._sm: Optional[StreamManager] = stream_manager
-        self.default_timeout = default_timeout or 30.0  # Default to 30s if not specified
+        self.default_timeout = default_timeout
 
         # Boot-strap parameters (only needed if _sm is None)
         self._cfg_file = cfg_file
@@ -48,7 +59,68 @@ class MCPTool:
         self._server_names = server_names or {}
         self._namespace = namespace
 
-        self._sm_lock = asyncio.Lock()
+        # Create lock only when needed (not during deserialization)
+        self._sm_lock: Optional[asyncio.Lock] = None
+
+    def _ensure_lock(self) -> asyncio.Lock:
+        """Ensure the lock exists, creating it if necessary."""
+        if self._sm_lock is None:
+            self._sm_lock = asyncio.Lock()
+        return self._sm_lock
+
+    # ------------------------------------------------------------------ #
+    # Serialization support for subprocess execution
+    # ------------------------------------------------------------------ #
+    def __getstate__(self) -> Dict[str, Any]:
+        """
+        Custom serialization for pickle support.
+
+        Excludes non-serializable async components and stream manager.
+        The subprocess will recreate these as needed.
+        """
+        state = self.__dict__.copy()
+
+        # Remove non-serializable items
+        state['_sm'] = None  # StreamManager will be recreated in subprocess
+        state['_sm_lock'] = None  # Lock will be recreated when needed
+
+        # Ensure we have the necessary configuration for subprocess
+        # If no servers specified, default to the tool name (common pattern)
+        if not state.get('_servers'):
+            # Extract server name from tool_name (e.g., "get_current_time" -> "time")
+            # This is a heuristic - adjust based on your naming convention
+            if 'time' in self.tool_name.lower():
+                state['_servers'] = ['time']
+                state['_server_names'] = {0: 'time'}
+            else:
+                # Default fallback - use the tool name itself
+                state['_servers'] = [self.tool_name]
+                state['_server_names'] = {0: self.tool_name}
+
+        # Ensure we have a config file path
+        if not state.get('_cfg_file'):
+            state['_cfg_file'] = 'server_config.json'
+
+        logger.debug(f"Serializing MCPTool '{self.tool_name}' for subprocess with servers: {state['_servers']}")
+        return state
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        """
+        Custom deserialization for pickle support.
+
+        Restores the object state and ensures required fields are set.
+        """
+        self.__dict__.update(state)
+
+        # Ensure critical fields exist
+        if not hasattr(self, 'tool_name') or not self.tool_name:
+            raise ValueError("Invalid MCPTool state: missing tool_name")
+
+        # Initialize transient fields
+        self._sm = None
+        self._sm_lock = None
+
+        logger.debug(f"Deserialized MCPTool '{self.tool_name}' in subprocess")
 
     # ------------------------------------------------------------------ #
     async def _ensure_stream_manager(self) -> StreamManager:
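The `__getstate__`/`__setstate__` pair above follows the standard pickle protocol for subprocess hand-off: drop the attributes that cannot cross a process boundary (the live `StreamManager` and the event-loop-bound `asyncio.Lock`) and recreate them lazily on the other side, while `_ensure_lock()` plus the re-check used in the next hunk is classic double-checked lazy initialisation. A minimal, self-contained sketch of both patterns (a toy class, not chuk-tool-processor code):

```python
import asyncio
import pickle
from typing import Any, Dict, Optional


class Shim:
    """Toy stand-in for MCPTool: a lazily created lock plus pickle hooks."""

    def __init__(self, tool_name: str) -> None:
        self.tool_name = tool_name
        self._resource: Optional[str] = None
        self._lock: Optional[asyncio.Lock] = None  # deferred, like _sm_lock above

    def _ensure_lock(self) -> asyncio.Lock:
        # No await between check and assignment, so this is race-free
        # within a single event loop.
        if self._lock is None:
            self._lock = asyncio.Lock()
        return self._lock

    async def bootstrap(self) -> str:
        if self._resource is not None:      # fast path: no lock taken
            return self._resource
        async with self._ensure_lock():
            if self._resource is None:      # re-check inside the lock
                await asyncio.sleep(0.01)   # stands in for the expensive setup
                self._resource = "ready"
        return self._resource

    def __getstate__(self) -> Dict[str, Any]:
        state = self.__dict__.copy()
        state["_lock"] = None      # locks are bound to an event loop - never pickle
        state["_resource"] = None  # rebuilt lazily in the child process
        return state

    def __setstate__(self, state: Dict[str, Any]) -> None:
        self.__dict__.update(state)


async def main() -> None:
    clone = pickle.loads(pickle.dumps(Shim("get_current_time")))
    print(clone.tool_name)  # survives the round-trip
    print(await asyncio.gather(*(clone.bootstrap() for _ in range(5))))  # setup runs once


asyncio.run(main())
```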
@@ -61,7 +133,8 @@ class MCPTool:
         if self._sm is not None:
             return self._sm
 
-        async with self._sm_lock:
+        # Use the lock, creating it if needed
+        async with self._ensure_lock():
             if self._sm is None:  # re-check inside lock
                 logger.info(
                     "Boot-strapping MCP stdio transport for '%s'", self.tool_name
@@ -79,57 +152,92 @@ class MCPTool:
 
         return self._sm  # type: ignore[return-value]
 
-    # ------------------------------------------------------------------ #
     async def execute(self, timeout: Optional[float] = None, **kwargs: Any) -> Any:
         """
-        Forward the call to the remote MCP tool with timeout support.
+        Invoke the remote MCP tool, guaranteeing that *one* timeout is enforced.
 
-        Args:
-            timeout: Optional timeout for this specific call. If not provided,
-                     uses the instance's default_timeout.
-            **kwargs: Arguments to pass to the MCP tool.
+        Parameters
+        ----------
+        timeout : float | None
+            If provided, forward this to StreamManager. Otherwise fall back
+            to ``self.default_timeout``.
+        **kwargs
+            Arguments forwarded to the tool.
 
-        Returns:
-            The result from the MCP tool call.
+        Returns
+        -------
+        Any
+            The ``content`` of the remote tool response.
 
         Raises
         ------
         RuntimeError
-            If the server returns an error payload.
+            The remote tool returned an error payload.
         asyncio.TimeoutError
-            If the call times out.
+            The call exceeded the chosen timeout.
         """
         sm = await self._ensure_stream_manager()
-
-        # Use provided timeout, fall back to instance default, then global default
-        effective_timeout = timeout if timeout is not None else self.default_timeout
-
-        logger.debug("Calling MCP tool '%s' with timeout: %ss", self.tool_name, effective_timeout)
-
-        try:
-            # Pass timeout directly to StreamManager instead of wrapping with wait_for
-            result = await sm.call_tool(
-                tool_name=self.tool_name,
-                arguments=kwargs,
-                timeout=effective_timeout
+
+        # Pick the timeout we will enforce (may be None = no limit).
+        effective_timeout: Optional[float] = (
+            timeout if timeout is not None else self.default_timeout
+        )
+
+        call_kwargs: dict[str, Any] = {
+            "tool_name": self.tool_name,
+            "arguments": kwargs,
+        }
+        if effective_timeout is not None:
+            call_kwargs["timeout"] = effective_timeout
+            logger.debug(
+                "Forwarding timeout=%ss to StreamManager for tool '%s'",
+                effective_timeout,
+                self.tool_name,
             )
-
-            if result.get("isError"):
-                err = result.get("error", "Unknown error")
-                logger.error("Remote MCP error from '%s': %s", self.tool_name, err)
-                raise RuntimeError(err)
-
-            return result.get("content")
-
+
+        try:
+            result = await sm.call_tool(**call_kwargs)
         except asyncio.TimeoutError:
-            logger.warning("MCP tool '%s' timed out after %ss", self.tool_name, effective_timeout)
-            raise
-        except Exception as e:
-            logger.error("Error calling MCP tool '%s': %s", self.tool_name, e)
+            logger.warning(
+                "MCP tool '%s' timed out after %ss",
+                self.tool_name,
+                effective_timeout,
+            )
             raise
 
+        if result.get("isError"):
+            err = result.get("error", "Unknown error")
+            logger.error("Remote MCP error from '%s': %s", self.tool_name, err)
+            raise RuntimeError(err)
+
+        return result.get("content")
+
     # ------------------------------------------------------------------ #
     # Legacy method name support
     async def _aexecute(self, timeout: Optional[float] = None, **kwargs: Any) -> Any:
         """Legacy alias for execute() method."""
-        return await self.execute(timeout=timeout, **kwargs)
+        return await self.execute(timeout=timeout, **kwargs)
+
+    # ------------------------------------------------------------------ #
+    # Utility methods for debugging
+    # ------------------------------------------------------------------ #
+    def is_serializable(self) -> bool:
+        """Check if this tool can be serialized (for debugging)."""
+        try:
+            import pickle
+            pickle.dumps(self)
+            return True
+        except Exception:
+            return False
+
+    def get_serialization_info(self) -> Dict[str, Any]:
+        """Get information about what would be serialized."""
+        state = self.__getstate__()
+        return {
+            "tool_name": state.get("tool_name"),
+            "namespace": state.get("_namespace"),
+            "servers": state.get("_servers"),
+            "cfg_file": state.get("_cfg_file"),
+            "has_stream_manager": state.get("_sm") is not None,
+            "serializable_size": len(str(state))
+        }
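Taken together, the 0.5 `execute()` forwards exactly one timeout to `StreamManager` (the per-call value, else `default_timeout`, else none at all) instead of layering `asyncio.wait_for` on top. A rough usage sketch based on the signatures above; the config path, server list, and tool argument are placeholders, and a configured, running MCP server is assumed:

```python
import asyncio

from chuk_tool_processor.mcp.mcp_tool import MCPTool


async def main() -> None:
    tool = MCPTool(
        "get_current_time",             # tool_name is now required (empty -> ValueError)
        cfg_file="server_config.json",  # placeholder path
        servers=["time"],               # placeholder server list
        default_timeout=10.0,           # 0.5 drops the implicit 30 s fallback
    )
    # A per-call timeout overrides default_timeout; if both are None,
    # no timeout kwarg is forwarded to StreamManager at all.
    result = await tool.execute(timeout=5.0, timezone="UTC")  # tool argument is hypothetical
    print(result)


asyncio.run(main())
```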
chuk_tool_processor/mcp/register_mcp_tools.py
@@ -4,7 +4,7 @@
 Discover the remote MCP tools exposed by a :class:`~chuk_tool_processor.mcp.stream_manager.StreamManager`
 instance and register them in the local CHUK registry.
 
-The helper is now **async-native** – call it with ``await``.
+The helper is now **async-native** - call it with ``await``.
 """
 
 from __future__ import annotations
@@ -55,7 +55,7 @@ async def register_mcp_tools(
     for tool_def in mcp_tools:
         tool_name = tool_def.get("name")
         if not tool_name:
-            logger.warning("Remote tool definition without a 'name' field – skipped")
+            logger.warning("Remote tool definition without a 'name' field - skipped")
             continue
 
         description = tool_def.get("description") or f"MCP tool • {tool_name}"
@@ -96,5 +96,5 @@ async def register_mcp_tools(
         except Exception as exc:  # noqa: BLE001
             logger.error("Failed to register MCP tool '%s': %s", tool_name, exc)
 
-    logger.info("MCP registration complete – %d tool(s) available", len(registered))
+    logger.info("MCP registration complete - %d tool(s) available", len(registered))
     return registered
chuk_tool_processor/mcp/setup_mcp_sse.py
@@ -8,7 +8,7 @@ Utility that wires up:
 2. The remote MCP tools exposed by that manager (via
    :pyfunc:`~chuk_tool_processor.mcp.register_mcp_tools.register_mcp_tools`).
 3. A fully-featured :class:`~chuk_tool_processor.core.processor.ToolProcessor`
-   instance that can execute those tools – with optional caching,
+   instance that can execute those tools - with optional caching,
    rate-limiting, retries, etc.
 """
 
@@ -28,7 +28,7 @@ logger = get_logger("chuk_tool_processor.mcp.setup_sse")
 # --------------------------------------------------------------------------- #
 # public helper
 # --------------------------------------------------------------------------- #
-async def setup_mcp_sse(  # noqa: C901 – long, but just a config wrapper
+async def setup_mcp_sse(  # noqa: C901 - long, but just a config wrapper
     *,
     servers: List[Dict[str, str]],
     server_names: Optional[Dict[int, str]] = None,
@@ -47,7 +47,7 @@ async def setup_mcp_sse(  # noqa: C901 – long, but just a config wrapper
     Spin up an SSE-backed *StreamManager*, register all its remote tools,
     and return a ready-to-go :class:`ToolProcessor`.
 
-    Everything is **async-native** – call with ``await``.
+    Everything is **async-native** - call with ``await``.
 
     NEW: Automatically detects and adds bearer token from MCP_BEARER_TOKEN
     environment variable if not explicitly provided in server config.
@@ -91,7 +91,7 @@ async def setup_mcp_sse(  # noqa: C901 – long, but just a config wrapper
     )
 
     logger.info(
-        "MCP (SSE) initialised – %s tool%s registered into namespace '%s'",
+        "MCP (SSE) initialised - %s tool%s registered into namespace '%s'",
         len(registered),
         "" if len(registered) == 1 else "s",
         namespace,
chuk_tool_processor/mcp/setup_mcp_stdio.py
@@ -26,7 +26,7 @@ logger = get_logger("chuk_tool_processor.mcp.setup_stdio")
 # --------------------------------------------------------------------------- #
 # public helper
 # --------------------------------------------------------------------------- #
-async def setup_mcp_stdio(  # noqa: C901 – long but just a config facade
+async def setup_mcp_stdio(  # noqa: C901 - long but just a config facade
     *,
     config_file: str,
     servers: List[str],
@@ -72,7 +72,7 @@ async def setup_mcp_stdio(  # noqa: C901 – long but just a config facade
     )
 
     logger.info(
-        "MCP (stdio) initialised – %s tool%s registered into namespace '%s'",
+        "MCP (stdio) initialised - %s tool%s registered into namespace '%s'",
         len(registered),
         "" if len(registered) == 1 else "s",
         namespace,
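Both setup helpers are awaitable wrappers around the same flow: start a `StreamManager`, call `register_mcp_tools`, and hand back a configured processor. A hedged usage sketch follows; only `config_file` and `servers` are visible in the hunk above, so the `namespace` keyword and the `(processor, stream_manager)` return shape are assumptions inferred from the surrounding docstrings and log lines:

```python
import asyncio

from chuk_tool_processor.mcp.setup_mcp_stdio import setup_mcp_stdio


async def main() -> None:
    # Assumed return shape: a ToolProcessor plus the underlying StreamManager.
    processor, stream_manager = await setup_mcp_stdio(  # async-native: must be awaited
        config_file="server_config.json",  # placeholder path
        servers=["time"],                  # placeholder server list
        namespace="stdio",                 # assumed keyword, inferred from the log line
    )
    try:
        ...  # route tool calls through the processor here
    finally:
        await stream_manager.close()


asyncio.run(main())
```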
chuk_tool_processor/mcp/stream_manager.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 # chuk_tool_processor/mcp/stream_manager.py
 """
 StreamManager for CHUK Tool Processor.
@@ -36,6 +37,7 @@ class StreamManager:
         self.server_names: Dict[int, str] = {}
         self.all_tools: List[Dict[str, Any]] = []
         self._lock = asyncio.Lock()
+        self._close_tasks: List[asyncio.Task] = []  # Track cleanup tasks
 
     # ------------------------------------------------------------------ #
     # factory helpers                                                     #
@@ -77,7 +79,7 @@ class StreamManager:
         return inst
 
     # ------------------------------------------------------------------ #
-    # initialisation – stdio / sse                                        #
+    # initialisation - stdio / sse                                        #
     # ------------------------------------------------------------------ #
     async def initialize(
         self,
@@ -143,12 +145,12 @@
                         "status": status,
                     }
                 )
-                logger.info("Initialised %s – %d tool(s)", server_name, len(tools))
+                logger.info("Initialised %s - %d tool(s)", server_name, len(tools))
             except Exception as exc:  # noqa: BLE001
                 logger.error("Error initialising %s: %s", server_name, exc)
 
         logger.info(
-            "StreamManager ready – %d server(s), %d tool(s)",
+            "StreamManager ready - %d server(s), %d tool(s)",
             len(self.transports),
             len(self.all_tools),
         )
@@ -194,12 +196,12 @@
             self.server_info.append(
                 {"id": idx, "name": name, "tools": len(tools), "status": status}
             )
-            logger.info("Initialised SSE %s – %d tool(s)", name, len(tools))
+            logger.info("Initialised SSE %s - %d tool(s)", name, len(tools))
         except Exception as exc:  # noqa: BLE001
             logger.error("Error initialising SSE %s: %s", name, exc)
 
         logger.info(
-            "StreamManager ready – %d SSE server(s), %d tool(s)",
+            "StreamManager ready - %d SSE server(s), %d tool(s)",
             len(self.transports),
             len(self.all_tools),
         )
@@ -245,7 +247,7 @@
         return []
 
     # ------------------------------------------------------------------ #
-    # EXTRA HELPERS – ping / resources / prompts                          #
+    # EXTRA HELPERS - ping / resources / prompts                          #
     # ------------------------------------------------------------------ #
     async def ping_servers(self) -> List[Dict[str, Any]]:
         async def _ping_one(name: str, tr: MCPBaseTransport):
@@ -364,22 +366,76 @@
         return await transport.call_tool(tool_name, arguments)
 
     # ------------------------------------------------------------------ #
-    # shutdown                                                            #
+    # shutdown - PROPERLY FIXED VERSION                                   #
     # ------------------------------------------------------------------ #
     async def close(self) -> None:
-        tasks = [tr.close() for tr in self.transports.values()]
-        if tasks:
+        """
+        Properly close all transports with graceful handling of cancellation.
+        """
+        if not self.transports:
+            return
+
+        # Cancel any existing close tasks
+        for task in self._close_tasks:
+            if not task.done():
+                task.cancel()
+        self._close_tasks.clear()
+
+        # Create close tasks for all transports
+        close_tasks = []
+        for name, transport in list(self.transports.items()):
             try:
-                await asyncio.gather(*tasks)
-            except asyncio.CancelledError:  # pragma: no cover
-                pass
-            except Exception as exc:  # noqa: BLE001
-                logger.error("Error during close: %s", exc)
-
+                task = asyncio.create_task(
+                    self._close_transport(name, transport),
+                    name=f"close_{name}"
+                )
+                close_tasks.append(task)
+                self._close_tasks.append(task)
+            except Exception as e:
+                logger.debug(f"Error creating close task for {name}: {e}")
+
+        # Wait for all close tasks with a timeout
+        if close_tasks:
+            try:
+                # Give transports a reasonable time to close gracefully
+                await asyncio.wait_for(
+                    asyncio.gather(*close_tasks, return_exceptions=True),
+                    timeout=2.0
+                )
+            except asyncio.TimeoutError:
+                # Cancel any still-running tasks
+                for task in close_tasks:
+                    if not task.done():
+                        task.cancel()
+                # Brief wait for cancellation to take effect
+                await asyncio.gather(*close_tasks, return_exceptions=True)
+            except asyncio.CancelledError:
+                # This is expected during event loop shutdown
+                logger.debug("Close operation cancelled during shutdown")
+            except Exception as e:
+                logger.debug(f"Unexpected error during close: {e}")
+
+        # Clean up state
+        self._cleanup_state()
+
+    async def _close_transport(self, name: str, transport: MCPBaseTransport) -> None:
+        """Close a single transport with error handling."""
+        try:
+            await transport.close()
+            logger.debug(f"Closed transport: {name}")
+        except asyncio.CancelledError:
+            # Re-raise cancellation
+            raise
+        except Exception as e:
+            logger.debug(f"Error closing transport {name}: {e}")
+
+    def _cleanup_state(self) -> None:
+        """Clean up internal state (synchronous)."""
         self.transports.clear()
         self.server_info.clear()
         self.tool_to_server_map.clear()
         self.all_tools.clear()
+        self._close_tasks.clear()
 
     # ------------------------------------------------------------------ #
     # backwards-compat: streams helper                                    #
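The rewritten `close()` is a bounded-shutdown recipe: give every transport its own task, wait for the whole batch with `asyncio.wait_for`, and cancel stragglers rather than hang. The same recipe in a standalone, runnable sketch (the 2-second grace period mirrors the hunk above):

```python
import asyncio
from typing import Awaitable, Callable, List


async def close_all(closers: List[Callable[[], Awaitable[None]]], grace: float = 2.0) -> None:
    """Run all closers concurrently; cancel whatever still runs after `grace` seconds."""
    tasks = [asyncio.create_task(c()) for c in closers]
    try:
        # return_exceptions=True keeps one failing closer from masking the rest.
        await asyncio.wait_for(asyncio.gather(*tasks, return_exceptions=True), timeout=grace)
    except asyncio.TimeoutError:
        for t in tasks:
            if not t.done():
                t.cancel()
        await asyncio.gather(*tasks, return_exceptions=True)  # let cancellations settle


async def fast_close() -> None:
    await asyncio.sleep(0.1)


async def stuck_close() -> None:
    await asyncio.sleep(60)  # simulates a transport that never finishes closing


asyncio.run(close_all([fast_close, stuck_close]))  # returns after ~2 s, not 60
```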
chuk_tool_processor/mcp/transport/base_transport.py
@@ -73,7 +73,7 @@ class MCPBaseTransport(ABC):
     @abstractmethod
     async def list_resources(self) -> Dict[str, Any]:
         """
-        Retrieve the server’s resources catalogue.
+        Retrieve the server's resources catalogue.
 
         Expected shape::
             { "resources": [ {...}, ... ], "nextCursor": "…", … }
@@ -83,7 +83,7 @@ class MCPBaseTransport(ABC):
     @abstractmethod
     async def list_prompts(self) -> Dict[str, Any]:
         """
-        Retrieve the server’s prompt catalogue.
+        Retrieve the server's prompt catalogue.
 
         Expected shape::
             { "prompts": [ {...}, ... ], "nextCursor": "…", … }
chuk_tool_processor/mcp/transport/sse_transport.py
@@ -10,6 +10,7 @@ This transport:
 5. Handles async responses via SSE message events
 
 FIXED: All hardcoded timeouts are now configurable parameters.
+FIXED: Enhanced close method to avoid cancel scope conflicts.
 """
 from __future__ import annotations
 
@@ -206,24 +207,11 @@ class SSETransport(MCPBaseTransport):
             print(f"⚠️ Failed to send notification: {e}")
 
     async def close(self) -> None:
-        """Close the transport."""
-        # Cancel any pending requests
-        for future in self._pending_requests.values():
-            if not future.done():
-                future.cancel()
-        self._pending_requests.clear()
-
-        if self._sse_task:
-            self._sse_task.cancel()
-            with contextlib.suppress(asyncio.CancelledError):
-                await self._sse_task
-            self._sse_task = None
-
-        if self._client:
-            await self._client.aclose()
-            self._client = None
-        self.session = None
-
+        """Minimal close method with zero async operations."""
+        # Just clear references - no async operations at all
+        self._context_stack = None
+        self.read_stream = None
+        self.write_stream = None
 
     # ------------------------------------------------------------------ #
     # SSE Connection Handler                                             #
@@ -351,7 +339,7 @@
         """
         # NEW: Ensure initialization before tool calls
         if not self._initialized.is_set():
-            return {"isError": True, "error": "MCP session not initialized"}
+            return {"isError": True, "error": "SSE transport not implemented"}
 
         if not self._message_url:
             return {"isError": True, "error": "No message endpoint available"}
chuk_tool_processor/mcp/transport/stdio_transport.py
@@ -4,6 +4,7 @@ from __future__ import annotations
 from contextlib import AsyncExitStack
 import json
 from typing import Dict, Any, List, Optional
+import asyncio
 
 # ------------------------------------------------------------------ #
 # Local import                                                        #
@@ -71,14 +72,11 @@ class StdioTransport(MCPBaseTransport):
             return False
 
     async def close(self) -> None:
-        if self._context_stack:
-            try:
-                await self._context_stack.__aexit__(None, None, None)
-            except Exception:
-                pass
+        """Minimal close method with zero async operations."""
+        # Just clear references - no async operations at all
+        self._context_stack = None
         self.read_stream = None
         self.write_stream = None
-        self._context_stack = None
 
     # --------------------------------------------------------------------- #
     # Utility                                                               #
@@ -131,7 +129,7 @@ class StdioTransport(MCPBaseTransport):
     def get_streams(self):
         """
         Expose the low-level streams so legacy callers can access them
-        directly. The base-class’ default returns an empty list; here we
+        directly. The base-class' default returns an empty list; here we
         return a single-element list when the transport is active.
         """
         if self.read_stream and self.write_stream:
@@ -145,7 +143,7 @@ class StdioTransport(MCPBaseTransport):
         self, tool_name: str, arguments: Dict[str, Any]
     ) -> Dict[str, Any]:
         """
-        Execute *tool_name* with *arguments* and normalise the server’s reply.
+        Execute *tool_name* with *arguments* and normalise the server's reply.
 
         The echo-server often returns:
             {
@@ -194,4 +192,4 @@ class StdioTransport(MCPBaseTransport):
             import logging
 
             logging.error(f"Error calling tool {tool_name}: {e}")
-            return {"isError": True, "error": str(e)}
+            return {"isError": True, "error": str(e)}
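Both transports now make `close()` purely synchronous reference-clearing, because unwinding an `AsyncExitStack` (or the anyio cancel scope behind it) from a task other than the one that entered it is what raised the conflicts named in the SSE header above. One conventional alternative, sketched here for contrast rather than as what 0.5 does, is to let a single owner task both enter and exit the stack and have `close()` merely signal it:

```python
import asyncio
from contextlib import AsyncExitStack
from typing import Optional


class OwnedTransport:
    """Sketch: the task that enters the AsyncExitStack is the one that exits it."""

    def __init__(self) -> None:
        self._stop = asyncio.Event()
        self._task: Optional[asyncio.Task] = None

    async def _run(self) -> None:
        async with AsyncExitStack() as stack:
            # enter streams/subprocesses on `stack` here, inside this task
            await self._stop.wait()
        # the stack unwinds here, in the same task that entered it

    def start(self) -> None:
        self._task = asyncio.create_task(self._run())

    async def close(self) -> None:
        self._stop.set()  # signal the owner; never unwind from outside
        if self._task:
            await self._task


async def main() -> None:
    t = OwnedTransport()
    t.start()
    await asyncio.sleep(0.1)
    await t.close()
    print("closed cleanly")


asyncio.run(main())
```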
chuk_tool_processor/models/validated_tool.py
@@ -35,7 +35,7 @@ T_Validated = TypeVar("T_Validated", bound="ValidatedTool")
 
 
 # --------------------------------------------------------------------------- #
-# Helper mix-in – serialise a *class* into assorted formats
+# Helper mix-in - serialise a *class* into assorted formats
 # --------------------------------------------------------------------------- #
 class _ExportMixin:
     """Static helpers that expose a tool class in other specs."""
@@ -79,7 +79,7 @@ class _ExportMixin:
         return cls.Arguments.model_json_schema()  # type: ignore[attr-defined]
 
     # ------------------------------------------------------------------ #
-    # Tiny XML tag – handy for unit-tests / demos
+    # Tiny XML tag - handy for unit-tests / demos
     # ------------------------------------------------------------------ #
     @classmethod
     def to_xml_tag(cls: type[T_Validated], **arguments: Any) -> str:
@@ -96,9 +96,9 @@ class ValidatedTool(_ExportMixin, BaseModel):
     """Pydantic-validated base for new async-native tools."""
 
     # ------------------------------------------------------------------ #
-    # Inner models – override in subclasses
+    # Inner models - override in subclasses
     # ------------------------------------------------------------------ #
-    class Arguments(BaseModel):  # noqa: D401 – acts as a namespace
+    class Arguments(BaseModel):  # noqa: D401 - acts as a namespace
         """Input model"""
 
     class Result(BaseModel):  # noqa: D401
@@ -124,14 +124,14 @@
     # ------------------------------------------------------------------ #
     # Sub-classes must implement this
     # ------------------------------------------------------------------ #
-    async def _execute(self, **_kwargs: Any):  # noqa: D401 – expected override
+    async def _execute(self, **_kwargs: Any):  # noqa: D401 - expected override
         raise NotImplementedError("Tool must implement async _execute()")
 
 
 # --------------------------------------------------------------------------- #
 # Decorator to retrofit validation onto classic "imperative" tools
 # --------------------------------------------------------------------------- #
-def with_validation(cls):  # noqa: D401 – factory
+def with_validation(cls):  # noqa: D401 - factory
     """
     Decorator that wraps an existing async ``execute`` method with:
 
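Punctuation changes aside, this file still defines the contract a 0.5 tool subclasses: declare `Arguments` and `Result` models and override the async `_execute()`. A short sketch of a conforming subclass, assuming the API exactly as shown in these hunks (returning a `Result` instance is an assumption; the diff only shows that `_execute` must be overridden):

```python
from pydantic import BaseModel

from chuk_tool_processor.models.validated_tool import ValidatedTool


class Add(ValidatedTool):
    class Arguments(BaseModel):  # input model, validated by pydantic
        a: int
        b: int

    class Result(BaseModel):  # output model
        total: int

    async def _execute(self, a: int, b: int) -> "Add.Result":
        return self.Result(total=a + b)
```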
chuk_tool_processor/plugins/discovery.py
@@ -58,13 +58,13 @@ class PluginDiscovery:
     """
     Recursively scans *package_paths* for plugin classes and registers them.
 
-    * Parser plugins – concrete subclasses of :class:`ParserPlugin`
+    * Parser plugins - concrete subclasses of :class:`ParserPlugin`
       with an **async** ``try_parse`` coroutine.
 
-    * Execution strategies – concrete subclasses of
+    * Execution strategies - concrete subclasses of
       :class:`ExecutionStrategy`.
 
-    * Explicitly-decorated plugins – classes tagged with ``@plugin(...)``.
+    * Explicitly-decorated plugins - classes tagged with ``@plugin(...)``.
     """
 
     # ------------------------------------------------------------------ #
chuk_tool_processor/plugins/parsers/base.py
@@ -16,7 +16,7 @@ class ParserPlugin(ABC):
     Every parser plugin **must** implement the async ``try_parse`` coroutine.
 
     The processor awaits it and expects *a list* of :class:`ToolCall`
-    objects. If the plugin doesn’t recognise the input it should return an
+    objects. If the plugin doesn't recognise the input it should return an
     empty list.
     """
 
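Per the contract above, a parser either recognises the input and returns `ToolCall` objects or steps aside with an empty list. A minimal sketch under stated assumptions: the `try_parse` signature, the `ToolCall` import path, and its field names are not shown in this diff and are guesses:

```python
from typing import List

from chuk_tool_processor.models.tool_call import ToolCall  # import path assumed
from chuk_tool_processor.plugins.parsers.base import ParserPlugin


class EchoParser(ParserPlugin):
    async def try_parse(self, raw: str) -> List[ToolCall]:  # signature assumed
        if not raw.startswith("echo:"):
            return []  # not ours - let other parsers try
        return [ToolCall(tool="echo", arguments={"text": raw[5:]})]  # field names assumed
```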