chuk-tool-processor 0.3__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. chuk_tool_processor/core/processor.py +1 -1
  2. chuk_tool_processor/execution/strategies/inprocess_strategy.py +1 -1
  3. chuk_tool_processor/execution/tool_executor.py +47 -9
  4. chuk_tool_processor/execution/wrappers/caching.py +3 -3
  5. chuk_tool_processor/execution/wrappers/retry.py +163 -174
  6. chuk_tool_processor/logging/context.py +6 -6
  7. chuk_tool_processor/mcp/mcp_tool.py +48 -36
  8. chuk_tool_processor/mcp/register_mcp_tools.py +3 -3
  9. chuk_tool_processor/mcp/setup_mcp_sse.py +4 -4
  10. chuk_tool_processor/mcp/setup_mcp_stdio.py +2 -2
  11. chuk_tool_processor/mcp/stream_manager.py +72 -16
  12. chuk_tool_processor/mcp/transport/base_transport.py +2 -2
  13. chuk_tool_processor/mcp/transport/sse_transport.py +68 -13
  14. chuk_tool_processor/mcp/transport/stdio_transport.py +2 -2
  15. chuk_tool_processor/models/validated_tool.py +6 -6
  16. chuk_tool_processor/plugins/discovery.py +3 -3
  17. chuk_tool_processor/plugins/parsers/base.py +1 -1
  18. chuk_tool_processor/plugins/parsers/xml_tool.py +2 -2
  19. chuk_tool_processor/registry/auto_register.py +5 -5
  20. chuk_tool_processor/registry/interface.py +2 -2
  21. chuk_tool_processor/registry/providers/memory.py +2 -2
  22. chuk_tool_processor/utils/validation.py +1 -1
  23. chuk_tool_processor-0.4.1.dist-info/METADATA +831 -0
  24. {chuk_tool_processor-0.3.dist-info → chuk_tool_processor-0.4.1.dist-info}/RECORD +26 -26
  25. chuk_tool_processor-0.3.dist-info/METADATA +0 -401
  26. {chuk_tool_processor-0.3.dist-info → chuk_tool_processor-0.4.1.dist-info}/WHEEL +0 -0
  27. {chuk_tool_processor-0.3.dist-info → chuk_tool_processor-0.4.1.dist-info}/top_level.txt +0 -0

chuk_tool_processor/mcp/mcp_tool.py

@@ -36,11 +36,11 @@ class MCPTool:
         servers: Optional[List[str]] = None,
         server_names: Optional[Dict[int, str]] = None,
         namespace: str = "stdio",
-        default_timeout: Optional[float] = None,  # Add default timeout support
+        default_timeout: Optional[float] = None
     ) -> None:
         self.tool_name = tool_name
         self._sm: Optional[StreamManager] = stream_manager
-        self.default_timeout = default_timeout or 30.0  # Default to 30s if not specified
+        self.default_timeout = default_timeout

         # Boot-strap parameters (only needed if _sm is None)
         self._cfg_file = cfg_file
@@ -79,55 +79,67 @@ class MCPTool:

         return self._sm  # type: ignore[return-value]

-    # ------------------------------------------------------------------ #
     async def execute(self, timeout: Optional[float] = None, **kwargs: Any) -> Any:
         """
-        Forward the call to the remote MCP tool with timeout support.
+        Invoke the remote MCP tool, guaranteeing that *one* timeout is enforced.

-        Args:
-            timeout: Optional timeout for this specific call. If not provided,
-                uses the instance's default_timeout.
-            **kwargs: Arguments to pass to the MCP tool.
+        Parameters
+        ----------
+        timeout : float | None
+            If provided, forward this to StreamManager. Otherwise fall back
+            to ``self.default_timeout``.
+        **kwargs
+            Arguments forwarded to the tool.

-        Returns:
-            The result from the MCP tool call.
+        Returns
+        -------
+        Any
+            The ``content`` of the remote tool response.

         Raises
         ------
         RuntimeError
-            If the server returns an error payload.
+            The remote tool returned an error payload.
         asyncio.TimeoutError
-            If the call times out.
+            The call exceeded the chosen timeout.
         """
         sm = await self._ensure_stream_manager()
-
-        # Use provided timeout, fall back to instance default, then global default
-        effective_timeout = timeout if timeout is not None else self.default_timeout
-
-        logger.debug("Calling MCP tool '%s' with timeout: %ss", self.tool_name, effective_timeout)
-
-        try:
-            # Pass timeout directly to StreamManager instead of wrapping with wait_for
-            result = await sm.call_tool(
-                tool_name=self.tool_name,
-                arguments=kwargs,
-                timeout=effective_timeout
+
+        # Pick the timeout we will enforce (may be None = no limit).
+        effective_timeout: Optional[float] = (
+            timeout if timeout is not None else self.default_timeout
+        )
+
+        call_kwargs: dict[str, Any] = {
+            "tool_name": self.tool_name,
+            "arguments": kwargs,
+        }
+        if effective_timeout is not None:
+            call_kwargs["timeout"] = effective_timeout
+            logger.debug(
+                "Forwarding timeout=%ss to StreamManager for tool '%s'",
+                effective_timeout,
+                self.tool_name,
            )
-
-            if result.get("isError"):
-                err = result.get("error", "Unknown error")
-                logger.error("Remote MCP error from '%s': %s", self.tool_name, err)
-                raise RuntimeError(err)
-
-            return result.get("content")
-
+
+        try:
+            result = await sm.call_tool(**call_kwargs)
         except asyncio.TimeoutError:
-            logger.warning("MCP tool '%s' timed out after %ss", self.tool_name, effective_timeout)
-            raise
-        except Exception as e:
-            logger.error("Error calling MCP tool '%s': %s", self.tool_name, e)
+            logger.warning(
+                "MCP tool '%s' timed out after %ss",
+                self.tool_name,
+                effective_timeout,
+            )
            raise

+        if result.get("isError"):
+            err = result.get("error", "Unknown error")
+            logger.error("Remote MCP error from '%s': %s", self.tool_name, err)
+            raise RuntimeError(err)
+
+        return result.get("content")
+
+
     # ------------------------------------------------------------------ #
     # Legacy method name support
     async def _aexecute(self, timeout: Optional[float] = None, **kwargs: Any) -> Any:
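
Note on the change above: a per-call timeout now takes precedence over the constructor default, and a None default means no client-side limit is applied at all. A minimal caller-side sketch (tool and argument names are illustrative; the constructor is called with keywords since its full signature is not shown in this diff):

    import asyncio
    from chuk_tool_processor.mcp.mcp_tool import MCPTool

    async def demo(stream_manager):
        tool = MCPTool(tool_name="echo", stream_manager=stream_manager, default_timeout=5.0)
        try:
            # The per-call value takes precedence over default_timeout.
            return await tool.execute(timeout=2.0, message="hello")
        except asyncio.TimeoutError:
            return None  # the call exceeded the chosen timeout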

chuk_tool_processor/mcp/register_mcp_tools.py

@@ -4,7 +4,7 @@
 Discover the remote MCP tools exposed by a :class:`~chuk_tool_processor.mcp.stream_manager.StreamManager`
 instance and register them in the local CHUK registry.

-The helper is now **async-native** call it with ``await``.
+The helper is now **async-native** - call it with ``await``.
 """

 from __future__ import annotations
@@ -55,7 +55,7 @@ async def register_mcp_tools(
     for tool_def in mcp_tools:
         tool_name = tool_def.get("name")
         if not tool_name:
-            logger.warning("Remote tool definition without a 'name' field skipped")
+            logger.warning("Remote tool definition without a 'name' field - skipped")
             continue

         description = tool_def.get("description") or f"MCP tool • {tool_name}"
@@ -96,5 +96,5 @@ async def register_mcp_tools(
         except Exception as exc:  # noqa: BLE001
             logger.error("Failed to register MCP tool '%s': %s", tool_name, exc)

-    logger.info("MCP registration complete %d tool(s) available", len(registered))
+    logger.info("MCP registration complete - %d tool(s) available", len(registered))
     return registered

chuk_tool_processor/mcp/setup_mcp_sse.py

@@ -8,7 +8,7 @@ Utility that wires up:
 2. The remote MCP tools exposed by that manager (via
    :pyfunc:`~chuk_tool_processor.mcp.register_mcp_tools.register_mcp_tools`).
 3. A fully-featured :class:`~chuk_tool_processor.core.processor.ToolProcessor`
-   instance that can execute those tools with optional caching,
+   instance that can execute those tools - with optional caching,
    rate-limiting, retries, etc.
 """

@@ -28,7 +28,7 @@ logger = get_logger("chuk_tool_processor.mcp.setup_sse")
 # --------------------------------------------------------------------------- #
 # public helper
 # --------------------------------------------------------------------------- #
-async def setup_mcp_sse(  # noqa: C901 long, but just a config wrapper
+async def setup_mcp_sse(  # noqa: C901 - long, but just a config wrapper
     *,
     servers: List[Dict[str, str]],
     server_names: Optional[Dict[int, str]] = None,
@@ -47,7 +47,7 @@ async def setup_mcp_sse(  # noqa: C901 – long, but just a config wrapper
     Spin up an SSE-backed *StreamManager*, register all its remote tools,
     and return a ready-to-go :class:`ToolProcessor`.

-    Everything is **async-native** call with ``await``.
+    Everything is **async-native** - call with ``await``.

     NEW: Automatically detects and adds bearer token from MCP_BEARER_TOKEN
     environment variable if not explicitly provided in server config.
@@ -91,7 +91,7 @@ async def setup_mcp_sse(  # noqa: C901 – long, but just a config wrapper
     )

     logger.info(
-        "MCP (SSE) initialised %s tool%s registered into namespace '%s'",
+        "MCP (SSE) initialised - %s tool%s registered into namespace '%s'",
         len(registered),
         "" if len(registered) == 1 else "s",
         namespace,
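
A rough wiring sketch for the SSE helper. Only servers and server_names are visible in this diff, so everything else here is a placeholder; per the docstring above, the bearer token can also come from the MCP_BEARER_TOKEN environment variable, and the call returns a ready-to-go ToolProcessor:

    import os
    from chuk_tool_processor.mcp.setup_mcp_sse import setup_mcp_sse

    async def connect_sse():
        # Picked up automatically when no api_key is given in the server config.
        os.environ.setdefault("MCP_BEARER_TOKEN", "example-token")
        return await setup_mcp_sse(
            servers=[{"name": "search", "url": "https://mcp.example.com/sse"}],
            server_names={0: "search"},
        )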

chuk_tool_processor/mcp/setup_mcp_stdio.py

@@ -26,7 +26,7 @@ logger = get_logger("chuk_tool_processor.mcp.setup_stdio")
 # --------------------------------------------------------------------------- #
 # public helper
 # --------------------------------------------------------------------------- #
-async def setup_mcp_stdio(  # noqa: C901 long but just a config facade
+async def setup_mcp_stdio(  # noqa: C901 - long but just a config facade
     *,
     config_file: str,
     servers: List[str],
@@ -72,7 +72,7 @@ async def setup_mcp_stdio(  # noqa: C901 – long but just a config facade
     )

     logger.info(
-        "MCP (stdio) initialised %s tool%s registered into namespace '%s'",
+        "MCP (stdio) initialised - %s tool%s registered into namespace '%s'",
         len(registered),
         "" if len(registered) == 1 else "s",
         namespace,
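
The stdio variant follows the same shape; config_file and servers are the parameters shown above, and the values here are placeholders:

    from chuk_tool_processor.mcp.setup_mcp_stdio import setup_mcp_stdio

    async def connect_stdio():
        return await setup_mcp_stdio(
            config_file="server_config.json",  # placeholder path
            servers=["echo"],                  # placeholder server name
        )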

chuk_tool_processor/mcp/stream_manager.py

@@ -47,9 +47,16 @@ class StreamManager:
         servers: List[str],
         server_names: Optional[Dict[int, str]] = None,
         transport_type: str = "stdio",
+        default_timeout: float = 30.0,  # ADD: For consistency
     ) -> "StreamManager":
         inst = cls()
-        await inst.initialize(config_file, servers, server_names, transport_type)
+        await inst.initialize(
+            config_file,
+            servers,
+            server_names,
+            transport_type,
+            default_timeout=default_timeout  # PASS THROUGH
+        )
         return inst

     @classmethod
@@ -57,13 +64,20 @@
         cls,
         servers: List[Dict[str, str]],
         server_names: Optional[Dict[int, str]] = None,
+        connection_timeout: float = 10.0,  # ADD: For SSE connection setup
+        default_timeout: float = 30.0,  # ADD: For tool execution
     ) -> "StreamManager":
         inst = cls()
-        await inst.initialize_with_sse(servers, server_names)
+        await inst.initialize_with_sse(
+            servers,
+            server_names,
+            connection_timeout=connection_timeout,  # PASS THROUGH
+            default_timeout=default_timeout  # PASS THROUGH
+        )
         return inst

     # ------------------------------------------------------------------ #
-    # initialisation stdio / sse #
+    # initialisation - stdio / sse #
     # ------------------------------------------------------------------ #
     async def initialize(
         self,
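
A sketch of building a StreamManager directly with the new timeout knobs; create_with_sse is the factory name referenced later in this diff, and the server entry is illustrative:

    from chuk_tool_processor.mcp.stream_manager import StreamManager

    async def build_manager():
        return await StreamManager.create_with_sse(
            servers=[{"name": "search", "url": "https://mcp.example.com/sse"}],
            connection_timeout=5.0,  # SSE connection / handshake wait
            default_timeout=20.0,    # applied to tool calls
        )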
@@ -71,6 +85,7 @@ class StreamManager:
         servers: List[str],
         server_names: Optional[Dict[int, str]] = None,
         transport_type: str = "stdio",
+        default_timeout: float = 30.0,  # ADD: For consistency
     ) -> None:
         async with self._lock:
             self.server_names = server_names or {}
@@ -81,7 +96,24 @@ class StreamManager:
                     params = await load_config(config_file, server_name)
                     transport: MCPBaseTransport = StdioTransport(params)
                 elif transport_type == "sse":
-                    transport = SSETransport("http://localhost:8000")
+                    # WARNING: For SSE transport, prefer using create_with_sse() instead
+                    # This is a fallback for backward compatibility
+                    logger.warning("Using SSE transport in initialize() - consider using initialize_with_sse() instead")
+
+                    # Try to extract URL from params or use localhost as fallback
+                    if isinstance(params, dict) and 'url' in params:
+                        sse_url = params['url']
+                        api_key = params.get('api_key')
+                    else:
+                        sse_url = "http://localhost:8000"
+                        api_key = None
+                        logger.warning(f"No URL configured for SSE transport, using default: {sse_url}")
+
+                    transport = SSETransport(
+                        sse_url,
+                        api_key,
+                        default_timeout=default_timeout
+                    )
                 else:
                     logger.error("Unsupported transport type: %s", transport_type)
                     continue
@@ -111,12 +143,12 @@
                         "status": status,
                     }
                 )
-                logger.info("Initialised %s %d tool(s)", server_name, len(tools))
+                logger.info("Initialised %s - %d tool(s)", server_name, len(tools))
             except Exception as exc:  # noqa: BLE001
                 logger.error("Error initialising %s: %s", server_name, exc)

         logger.info(
-            "StreamManager ready %d server(s), %d tool(s)",
+            "StreamManager ready - %d server(s), %d tool(s)",
             len(self.transports),
             len(self.all_tools),
         )
@@ -125,6 +157,8 @@
         self,
         servers: List[Dict[str, str]],
         server_names: Optional[Dict[int, str]] = None,
+        connection_timeout: float = 10.0,  # ADD: For SSE connection setup
+        default_timeout: float = 30.0,  # ADD: For tool execution
     ) -> None:
         async with self._lock:
             self.server_names = server_names or {}
@@ -135,7 +169,14 @@
                 logger.error("Bad server config: %s", cfg)
                 continue
             try:
-                transport = SSETransport(url, cfg.get("api_key"))
+                # FIXED: Pass timeout parameters to SSETransport
+                transport = SSETransport(
+                    url,
+                    cfg.get("api_key"),
+                    connection_timeout=connection_timeout,  # ADD THIS
+                    default_timeout=default_timeout  # ADD THIS
+                )
+
                 if not await transport.initialize():
                     logger.error("Failed to init SSE %s", name)
                     continue
@@ -153,12 +194,12 @@
                 self.server_info.append(
                     {"id": idx, "name": name, "tools": len(tools), "status": status}
                 )
-                logger.info("Initialised SSE %s %d tool(s)", name, len(tools))
+                logger.info("Initialised SSE %s - %d tool(s)", name, len(tools))
             except Exception as exc:  # noqa: BLE001
                 logger.error("Error initialising SSE %s: %s", name, exc)

         logger.info(
-            "StreamManager ready %d SSE server(s), %d tool(s)",
+            "StreamManager ready - %d SSE server(s), %d tool(s)",
             len(self.transports),
             len(self.all_tools),
         )
@@ -204,7 +245,7 @@
             return []

     # ------------------------------------------------------------------ #
-    # EXTRA HELPERS ping / resources / prompts #
+    # EXTRA HELPERS - ping / resources / prompts #
     # ------------------------------------------------------------------ #
     async def ping_servers(self) -> List[Dict[str, Any]]:
         async def _ping_one(name: str, tr: MCPBaseTransport):
@@ -265,7 +306,7 @@
         tool_name: str,
         arguments: Dict[str, Any],
         server_name: Optional[str] = None,
-        timeout: Optional[float] = None,  # Add timeout parameter
+        timeout: Optional[float] = None,  # Timeout parameter already exists
     ) -> Dict[str, Any]:
         """
         Call a tool on the appropriate server with timeout support.
@@ -293,10 +334,25 @@
         if timeout is not None:
             logger.debug("Calling tool '%s' with %ss timeout", tool_name, timeout)
             try:
-                return await asyncio.wait_for(
-                    transport.call_tool(tool_name, arguments),
-                    timeout=timeout
-                )
+                # ENHANCED: Pass timeout to transport.call_tool if it supports it
+                if hasattr(transport, 'call_tool'):
+                    import inspect
+                    sig = inspect.signature(transport.call_tool)
+                    if 'timeout' in sig.parameters:
+                        # Transport supports timeout parameter - pass it through
+                        return await transport.call_tool(tool_name, arguments, timeout=timeout)
+                    else:
+                        # Transport doesn't support timeout - use asyncio.wait_for wrapper
+                        return await asyncio.wait_for(
+                            transport.call_tool(tool_name, arguments),
+                            timeout=timeout
+                        )
+                else:
+                    # Fallback to asyncio.wait_for
+                    return await asyncio.wait_for(
+                        transport.call_tool(tool_name, arguments),
+                        timeout=timeout
+                    )
             except asyncio.TimeoutError:
                 logger.warning("Tool '%s' timed out after %ss", tool_name, timeout)
                 return {
@@ -350,4 +406,4 @@
     # convenience alias
     @property
     def streams(self) -> List[Tuple[Any, Any]]:  # pragma: no cover
-        return self.get_streams()
+        return self.get_streams()
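
For context, a sketch of calling a tool through the manager and handling the result shape used throughout this diff (a dict carrying isError/error on failure and content on success); the tool name and arguments are illustrative:

    async def run_echo(sm):
        result = await sm.call_tool(
            tool_name="echo",
            arguments={"message": "hello"},
            timeout=10.0,  # forwarded to the transport when it accepts a timeout kwarg
        )
        if result.get("isError"):
            raise RuntimeError(result.get("error", "Unknown error"))
        return result.get("content")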

chuk_tool_processor/mcp/transport/base_transport.py

@@ -73,7 +73,7 @@ class MCPBaseTransport(ABC):
     @abstractmethod
     async def list_resources(self) -> Dict[str, Any]:
         """
-        Retrieve the servers resources catalogue.
+        Retrieve the server's resources catalogue.

         Expected shape::
             { "resources": [ {...}, ... ], "nextCursor": "…", … }
@@ -83,7 +83,7 @@
     @abstractmethod
     async def list_prompts(self) -> Dict[str, Any]:
         """
-        Retrieve the servers prompt catalogue.
+        Retrieve the server's prompt catalogue.

         Expected shape::
             { "prompts": [ {...}, ... ], "nextCursor": "…", … }

chuk_tool_processor/mcp/transport/sse_transport.py

@@ -8,6 +8,8 @@ This transport:
 3. Sends MCP initialize handshake FIRST
 4. Only then proceeds with tools/list and tool calls
 5. Handles async responses via SSE message events
+
+FIXED: All hardcoded timeouts are now configurable parameters.
 """
 from __future__ import annotations

@@ -24,7 +26,8 @@ from .base_transport import MCPBaseTransport
 # --------------------------------------------------------------------------- #
 # Helpers #
 # --------------------------------------------------------------------------- #
-DEFAULT_TIMEOUT = 30.0  # Longer timeout for real servers
+DEFAULT_TIMEOUT = 30.0  # Default timeout for tool calls
+DEFAULT_CONNECTION_TIMEOUT = 10.0  # Default timeout for connection setup
 HEADERS_JSON: Dict[str, str] = {"accept": "application/json"}


@@ -47,9 +50,26 @@ class SSETransport(MCPBaseTransport):
     5. Waits for async responses via SSE message events
     """

-    def __init__(self, url: str, api_key: Optional[str] = None) -> None:
+    def __init__(
+        self,
+        url: str,
+        api_key: Optional[str] = None,
+        connection_timeout: float = DEFAULT_CONNECTION_TIMEOUT,
+        default_timeout: float = DEFAULT_TIMEOUT
+    ) -> None:
+        """
+        Initialize SSE Transport with configurable timeouts.
+
+        Args:
+            url: Base URL for the MCP server
+            api_key: Optional API key for authentication
+            connection_timeout: Timeout for connection setup (default: 10.0s)
+            default_timeout: Default timeout for tool calls (default: 30.0s)
+        """
         self.base_url = url.rstrip("/")
         self.api_key = api_key
+        self.connection_timeout = connection_timeout
+        self.default_timeout = default_timeout

         # NEW: Auto-detect bearer token from environment if not provided
         if not self.api_key:
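
Constructing the transport with the new knobs might look like this; the URL and key are placeholders, while the parameter names come from the __init__ shown above:

    from chuk_tool_processor.mcp.transport.sse_transport import SSETransport

    transport = SSETransport(
        "https://mcp.example.com",  # placeholder URL
        api_key="sk-example",       # placeholder key
        connection_timeout=5.0,     # endpoint event / initialize handshake wait
        default_timeout=20.0,       # per tool call unless call_tool() overrides it
    )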
@@ -92,7 +112,7 @@

         self._client = httpx.AsyncClient(
             headers=headers,
-            timeout=DEFAULT_TIMEOUT,
+            timeout=self.default_timeout,  # Use configurable timeout
         )
         self.session = self._client

@@ -100,8 +120,8 @@
         self._sse_task = asyncio.create_task(self._handle_sse_connection())

         try:
-            # Wait for endpoint event (up to 10 seconds)
-            await asyncio.wait_for(self._connected.wait(), timeout=10.0)
+            # FIXED: Use configurable connection timeout instead of hardcoded 10.0
+            await asyncio.wait_for(self._connected.wait(), timeout=self.connection_timeout)

             # NEW: Send MCP initialize handshake
             if await self._initialize_mcp_session():
@@ -285,7 +305,8 @@
         if not self._initialized.is_set():
             print("⏳ Waiting for MCP initialization...")
             try:
-                await asyncio.wait_for(self._initialized.wait(), timeout=10.0)
+                # FIXED: Use configurable connection timeout instead of hardcoded 10.0
+                await asyncio.wait_for(self._initialized.wait(), timeout=self.connection_timeout)
             except asyncio.TimeoutError:
                 print("❌ Timeout waiting for MCP initialization")
                 return []
@@ -311,11 +332,26 @@

         return []

-    async def call_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Execute a tool call using the MCP protocol."""
+    async def call_tool(
+        self,
+        tool_name: str,
+        arguments: Dict[str, Any],
+        timeout: Optional[float] = None
+    ) -> Dict[str, Any]:
+        """
+        Execute a tool call using the MCP protocol.
+
+        Args:
+            tool_name: Name of the tool to call
+            arguments: Arguments to pass to the tool
+            timeout: Optional timeout for this specific call
+
+        Returns:
+            Dictionary containing the tool result or error
+        """
         # NEW: Ensure initialization before tool calls
         if not self._initialized.is_set():
-            return {"isError": True, "error": "MCP session not initialized"}
+            return {"isError": True, "error": "SSE transport not implemented"}

         if not self._message_url:
             return {"isError": True, "error": "No message endpoint available"}
@@ -331,7 +367,9 @@
             }
         }

-        response = await self._send_message(message)
+        # Use provided timeout or fall back to default
+        effective_timeout = timeout if timeout is not None else self.default_timeout
+        response = await self._send_message(message, timeout=effective_timeout)

         # Process MCP response
         if "error" in response:
@@ -363,8 +401,21 @@
         except Exception as e:
             return {"isError": True, "error": str(e)}

-    async def _send_message(self, message: Dict[str, Any]) -> Dict[str, Any]:
-        """Send a JSON-RPC message to the server and wait for async response."""
+    async def _send_message(
+        self,
+        message: Dict[str, Any],
+        timeout: Optional[float] = None
+    ) -> Dict[str, Any]:
+        """
+        Send a JSON-RPC message to the server and wait for async response.
+
+        Args:
+            message: JSON-RPC message to send
+            timeout: Optional timeout for this specific message
+
+        Returns:
+            Response message from the server
+        """
         if not self._client or not self._message_url:
             raise RuntimeError("Transport not properly initialized")

@@ -372,6 +423,9 @@
         if not message_id:
             raise ValueError("Message must have an ID")

+        # Use provided timeout or fall back to default
+        effective_timeout = timeout if timeout is not None else self.default_timeout
+
         # Create a future for this request
         future = asyncio.Future()
         async with self._message_lock:
@@ -391,7 +445,8 @@
             if response.status_code == 202:
                 # Server accepted - wait for async response via SSE
                 try:
-                    response_message = await asyncio.wait_for(future, timeout=30.0)
+                    # FIXED: Use effective_timeout instead of hardcoded 30.0
+                    response_message = await asyncio.wait_for(future, timeout=effective_timeout)
                     return response_message
                 except asyncio.TimeoutError:
                     raise RuntimeError(f"Timeout waiting for response to message {message_id}")

chuk_tool_processor/mcp/transport/stdio_transport.py

@@ -131,7 +131,7 @@ class StdioTransport(MCPBaseTransport):
     def get_streams(self):
         """
         Expose the low-level streams so legacy callers can access them
-        directly. The base-class default returns an empty list; here we
+        directly. The base-class' default returns an empty list; here we
         return a single-element list when the transport is active.
         """
         if self.read_stream and self.write_stream:
@@ -145,7 +145,7 @@
         self, tool_name: str, arguments: Dict[str, Any]
     ) -> Dict[str, Any]:
         """
-        Execute *tool_name* with *arguments* and normalise the servers reply.
+        Execute *tool_name* with *arguments* and normalise the server's reply.

         The echo-server often returns:
             {

chuk_tool_processor/models/validated_tool.py

@@ -35,7 +35,7 @@ T_Validated = TypeVar("T_Validated", bound="ValidatedTool")


 # --------------------------------------------------------------------------- #
-# Helper mix-in serialise a *class* into assorted formats
+# Helper mix-in - serialise a *class* into assorted formats
 # --------------------------------------------------------------------------- #
 class _ExportMixin:
     """Static helpers that expose a tool class in other specs."""
@@ -79,7 +79,7 @@ class _ExportMixin:
         return cls.Arguments.model_json_schema()  # type: ignore[attr-defined]

     # ------------------------------------------------------------------ #
-    # Tiny XML tag handy for unit-tests / demos
+    # Tiny XML tag - handy for unit-tests / demos
     # ------------------------------------------------------------------ #
     @classmethod
     def to_xml_tag(cls: type[T_Validated], **arguments: Any) -> str:
@@ -96,9 +96,9 @@ class ValidatedTool(_ExportMixin, BaseModel):
     """Pydantic-validated base for new async-native tools."""

     # ------------------------------------------------------------------ #
-    # Inner models override in subclasses
+    # Inner models - override in subclasses
     # ------------------------------------------------------------------ #
-    class Arguments(BaseModel):  # noqa: D401 acts as a namespace
+    class Arguments(BaseModel):  # noqa: D401 - acts as a namespace
         """Input model"""

     class Result(BaseModel):  # noqa: D401
@@ -124,14 +124,14 @@ class ValidatedTool(_ExportMixin, BaseModel):
     # ------------------------------------------------------------------ #
     # Sub-classes must implement this
     # ------------------------------------------------------------------ #
-    async def _execute(self, **_kwargs: Any):  # noqa: D401 expected override
+    async def _execute(self, **_kwargs: Any):  # noqa: D401 - expected override
         raise NotImplementedError("Tool must implement async _execute()")


 # --------------------------------------------------------------------------- #
 # Decorator to retrofit validation onto classic "imperative" tools
 # --------------------------------------------------------------------------- #
-def with_validation(cls):  # noqa: D401 factory
+def with_validation(cls):  # noqa: D401 - factory
     """
     Decorator that wraps an existing async ``execute`` method with:
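
For context, a minimal ValidatedTool subclass following the structure visible in these hunks (Arguments/Result inner models plus an async _execute); the tool itself, and returning a Result instance, are illustrative assumptions:

    from pydantic import BaseModel
    from chuk_tool_processor.models.validated_tool import ValidatedTool

    class AddTool(ValidatedTool):
        class Arguments(BaseModel):
            a: float
            b: float

        class Result(BaseModel):
            total: float

        async def _execute(self, a: float, b: float):
            return self.Result(total=a + b)  # returning a Result instance is an assumption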
 

chuk_tool_processor/plugins/discovery.py

@@ -58,13 +58,13 @@ class PluginDiscovery:
     """
     Recursively scans *package_paths* for plugin classes and registers them.

-    * Parser plugins concrete subclasses of :class:`ParserPlugin`
+    * Parser plugins - concrete subclasses of :class:`ParserPlugin`
       with an **async** ``try_parse`` coroutine.

-    * Execution strategies concrete subclasses of
+    * Execution strategies - concrete subclasses of
      :class:`ExecutionStrategy`.

-    * Explicitly-decorated plugins classes tagged with ``@plugin(...)``.
+    * Explicitly-decorated plugins - classes tagged with ``@plugin(...)``.
     """

     # ------------------------------------------------------------------ #

chuk_tool_processor/plugins/parsers/base.py

@@ -16,7 +16,7 @@ class ParserPlugin(ABC):
     Every parser plugin **must** implement the async ``try_parse`` coroutine.

     The processor awaits it and expects *a list* of :class:`ToolCall`
-    objects. If the plugin doesnt recognise the input it should return an
+    objects. If the plugin doesn't recognise the input it should return an
     empty list.
     """
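
A sketch of what a conforming parser plugin looks like under that contract: an async try_parse that yields a (possibly empty) list of ToolCall objects. The ToolCall import path, its field names, and the try_parse parameter name are assumptions made for illustration:

    import json
    from chuk_tool_processor.plugins.parsers.base import ParserPlugin
    from chuk_tool_processor.models.tool_call import ToolCall  # import path assumed

    class JsonParserPlugin(ParserPlugin):
        async def try_parse(self, raw: str) -> list[ToolCall]:
            try:
                data = json.loads(raw)
            except (json.JSONDecodeError, TypeError):
                return []  # not recognised: return an empty list, per the docstring
            if isinstance(data, dict) and "tool" in data:
                # ToolCall field names are assumed for illustration.
                return [ToolCall(tool=data["tool"], arguments=data.get("arguments", {}))]
            return []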