chuk-tool-processor 0.1.6__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff shows the changes between package versions as they were publicly released to their registry. It is provided for informational purposes only.

Potentially problematic release: this version of chuk-tool-processor might be problematic.

Files changed (45)
  1. chuk_tool_processor/core/processor.py +345 -132
  2. chuk_tool_processor/execution/strategies/inprocess_strategy.py +512 -68
  3. chuk_tool_processor/execution/strategies/subprocess_strategy.py +523 -63
  4. chuk_tool_processor/execution/tool_executor.py +282 -24
  5. chuk_tool_processor/execution/wrappers/caching.py +465 -123
  6. chuk_tool_processor/execution/wrappers/rate_limiting.py +199 -86
  7. chuk_tool_processor/execution/wrappers/retry.py +133 -23
  8. chuk_tool_processor/logging/__init__.py +83 -10
  9. chuk_tool_processor/logging/context.py +218 -22
  10. chuk_tool_processor/logging/formatter.py +56 -13
  11. chuk_tool_processor/logging/helpers.py +91 -16
  12. chuk_tool_processor/logging/metrics.py +75 -6
  13. chuk_tool_processor/mcp/mcp_tool.py +80 -35
  14. chuk_tool_processor/mcp/register_mcp_tools.py +74 -56
  15. chuk_tool_processor/mcp/setup_mcp_sse.py +41 -36
  16. chuk_tool_processor/mcp/setup_mcp_stdio.py +39 -37
  17. chuk_tool_processor/models/execution_strategy.py +52 -3
  18. chuk_tool_processor/models/streaming_tool.py +110 -0
  19. chuk_tool_processor/models/tool_call.py +56 -4
  20. chuk_tool_processor/models/tool_result.py +115 -9
  21. chuk_tool_processor/models/validated_tool.py +15 -13
  22. chuk_tool_processor/plugins/discovery.py +115 -70
  23. chuk_tool_processor/plugins/parsers/base.py +13 -5
  24. chuk_tool_processor/plugins/parsers/{function_call_tool_plugin.py → function_call_tool.py} +39 -20
  25. chuk_tool_processor/plugins/parsers/json_tool.py +50 -0
  26. chuk_tool_processor/plugins/parsers/openai_tool.py +88 -0
  27. chuk_tool_processor/plugins/parsers/xml_tool.py +74 -20
  28. chuk_tool_processor/registry/__init__.py +46 -7
  29. chuk_tool_processor/registry/auto_register.py +92 -28
  30. chuk_tool_processor/registry/decorators.py +134 -11
  31. chuk_tool_processor/registry/interface.py +48 -14
  32. chuk_tool_processor/registry/metadata.py +52 -6
  33. chuk_tool_processor/registry/provider.py +75 -36
  34. chuk_tool_processor/registry/providers/__init__.py +49 -10
  35. chuk_tool_processor/registry/providers/memory.py +59 -48
  36. chuk_tool_processor/registry/tool_export.py +208 -39
  37. chuk_tool_processor/utils/validation.py +18 -13
  38. chuk_tool_processor-0.1.7.dist-info/METADATA +401 -0
  39. chuk_tool_processor-0.1.7.dist-info/RECORD +58 -0
  40. {chuk_tool_processor-0.1.6.dist-info → chuk_tool_processor-0.1.7.dist-info}/WHEEL +1 -1
  41. chuk_tool_processor/plugins/parsers/json_tool_plugin.py +0 -38
  42. chuk_tool_processor/plugins/parsers/openai_tool_plugin.py +0 -76
  43. chuk_tool_processor-0.1.6.dist-info/METADATA +0 -462
  44. chuk_tool_processor-0.1.6.dist-info/RECORD +0 -57
  45. {chuk_tool_processor-0.1.6.dist-info → chuk_tool_processor-0.1.7.dist-info}/top_level.txt +0 -0
@@ -1,74 +1,400 @@
+ #!/usr/bin/env python
+ # chuk_tool_processor/execution/strategies/inprocess_strategy.py
  """
- In-process execution strategy with sync/async support.
+ In-process execution strategy for tools with true streaming support.

- This version prefers the public `execute()` wrapper (with validation and
- defaults) over the private `_execute` implementation, fixing missing-argument
- errors for `ValidatedTool` subclasses.
+ This strategy executes tools concurrently in the same process using asyncio.
+ It has special support for streaming tools, accessing their stream_execute method
+ directly to enable true item-by-item streaming.
  """
-
  from __future__ import annotations

  import asyncio
  import inspect
  import os
+ from contextlib import asynccontextmanager
  from datetime import datetime, timezone
- from typing import Any, List, Optional
+ from typing import Any, List, Optional, AsyncIterator, Set

  from chuk_tool_processor.core.exceptions import ToolExecutionError
  from chuk_tool_processor.models.execution_strategy import ExecutionStrategy
  from chuk_tool_processor.models.tool_call import ToolCall
  from chuk_tool_processor.models.tool_result import ToolResult
  from chuk_tool_processor.registry.interface import ToolRegistryInterface
- from chuk_tool_processor.logging import get_logger
+ from chuk_tool_processor.logging import get_logger, log_context_span

  logger = get_logger("chuk_tool_processor.execution.inprocess_strategy")


+ # --------------------------------------------------------------------------- #
+ # Async no-op context-manager (used when no semaphore configured)
+ # --------------------------------------------------------------------------- #
+ @asynccontextmanager
+ async def _noop_cm():
+     yield
+
+
+ # --------------------------------------------------------------------------- #
  class InProcessStrategy(ExecutionStrategy):
-     """Run tools inside the current interpreter, concurrently."""
+     """Execute tools in the local event-loop with optional concurrency cap."""

      def __init__(
          self,
          registry: ToolRegistryInterface,
-         default_timeout: float | None = None,
-         max_concurrency: int | None = None,
+         default_timeout: Optional[float] = None,
+         max_concurrency: Optional[int] = None,
      ) -> None:
+         """
+         Initialize the in-process execution strategy.
+
+         Args:
+             registry: Tool registry to use for tool lookups
+             default_timeout: Default timeout for tool execution
+             max_concurrency: Maximum number of concurrent executions
+         """
          self.registry = registry
          self.default_timeout = default_timeout
          self._sem = asyncio.Semaphore(max_concurrency) if max_concurrency else None
+
+         # Task tracking for cleanup
+         self._active_tasks = set()
+         self._shutting_down = False
+         self._shutdown_event = asyncio.Event()
+
+         # Tracking for which calls are being handled directly by the executor
+         # to prevent duplicate streaming results
+         self._direct_streaming_calls = set()

      # ------------------------------------------------------------------ #
-     # public API
+     def mark_direct_streaming(self, call_ids: Set[str]) -> None:
+         """
+         Mark tool calls that are being handled directly by the executor.
+
+         Args:
+             call_ids: Set of call IDs that should be skipped during streaming
+                 because they're handled directly
+         """
+         self._direct_streaming_calls.update(call_ids)
+
+     def clear_direct_streaming(self) -> None:
+         """Clear the list of direct streaming calls."""
+         self._direct_streaming_calls.clear()
+
+     # ------------------------------------------------------------------ #
+     # 🔌 legacy façade for older wrappers                                #
+     # ------------------------------------------------------------------ #
+     async def execute(
+         self,
+         calls: List[ToolCall],
+         *,
+         timeout: Optional[float] = None,
+     ) -> List[ToolResult]:
+         """
+         Back-compat shim.
+
+         Old wrappers (`retry`, `rate_limit`, `cache`, …) still expect an
+         ``execute()`` coroutine on an execution-strategy object.
+         The real implementation lives in :meth:`run`, so we just forward.
+         """
+         return await self.run(calls, timeout)
+
      # ------------------------------------------------------------------ #
      async def run(
          self,
          calls: List[ToolCall],
-         timeout: float | None = None,
+         timeout: Optional[float] = None,
      ) -> List[ToolResult]:
-         tasks = [
-             self._execute_single_call(call, timeout or self.default_timeout)
-             for call in calls
-         ]
-         return await asyncio.gather(*tasks)
+         """
+         Execute tool calls concurrently and preserve order.
+
+         Args:
+             calls: List of tool calls to execute
+             timeout: Optional timeout for execution
+
+         Returns:
+             List of tool results in the same order as calls
+         """
+         if not calls:
+             return []
+
+         tasks = []
+         for call in calls:
+             task = asyncio.create_task(
+                 self._execute_single_call(call, timeout or self.default_timeout)
+             )
+             self._active_tasks.add(task)
+             task.add_done_callback(self._active_tasks.discard)
+             tasks.append(task)
+
+         async with log_context_span("inprocess_execution", {"num_calls": len(calls)}):
+             return await asyncio.gather(*tasks)

      # ------------------------------------------------------------------ #
-     # helpers
+     async def stream_run(
+         self,
+         calls: List[ToolCall],
+         timeout: Optional[float] = None,
+     ) -> AsyncIterator[ToolResult]:
+         """
+         Execute tool calls concurrently and *yield* results as soon as they are
+         produced, preserving completion order.
+         """
+         if not calls:
+             return
+
+         queue: asyncio.Queue[ToolResult] = asyncio.Queue()
+         tasks = {
+             asyncio.create_task(
+                 self._stream_tool_call(call, queue, timeout or self.default_timeout)
+             )
+             for call in calls
+             if call.id not in self._direct_streaming_calls
+         }
+
+         # 🔑 keep consuming until every worker-task finished *and*
+         #    the queue is empty
+         while tasks or not queue.empty():
+             try:
+                 result = await queue.get()
+                 yield result
+             except asyncio.CancelledError:
+                 break
+
+             # clear finished tasks (frees exceptions as well)
+             done, tasks = await asyncio.wait(tasks, timeout=0)
+             for t in done:
+                 t.result()  # re-raise if a task crashed
+
+
+     async def _stream_tool_call(
+         self,
+         call: ToolCall,
+         queue: asyncio.Queue,
+         timeout: Optional[float],
+     ) -> None:
+         """
+         Execute a tool call with streaming support.
+
+         This looks up the tool and, if it's a streaming tool, accesses
+         stream_execute directly to get item-by-item streaming.
+
+         Args:
+             call: The tool call to execute
+             queue: Queue to put results into
+             timeout: Optional timeout in seconds
+         """
+         # Skip if call is being handled directly by the executor
+         if call.id in self._direct_streaming_calls:
+             return
+
+         if self._shutting_down:
+             # Early exit if shutting down
+             now = datetime.now(timezone.utc)
+             result = ToolResult(
+                 tool=call.tool,
+                 result=None,
+                 error="System is shutting down",
+                 start_time=now,
+                 end_time=now,
+                 machine=os.uname().nodename,
+                 pid=os.getpid(),
+             )
+             await queue.put(result)
+             return
+
+         try:
+             # Get the tool implementation
+             tool_impl = await self.registry.get_tool(call.tool, call.namespace)
+             if tool_impl is None:
+                 # Tool not found
+                 now = datetime.now(timezone.utc)
+                 result = ToolResult(
+                     tool=call.tool,
+                     result=None,
+                     error=f"Tool '{call.tool}' not found",
+                     start_time=now,
+                     end_time=now,
+                     machine=os.uname().nodename,
+                     pid=os.getpid(),
+                 )
+                 await queue.put(result)
+                 return
+
+             # Instantiate if class
+             tool = tool_impl() if inspect.isclass(tool_impl) else tool_impl
+
+             # Use semaphore if available
+             guard = self._sem if self._sem is not None else _noop_cm()
+
+             async with guard:
+                 # Check if this is a streaming tool
+                 if hasattr(tool, "supports_streaming") and tool.supports_streaming and hasattr(tool, "stream_execute"):
+                     # Use direct streaming for streaming tools
+                     await self._stream_with_timeout(tool, call, queue, timeout)
+                 else:
+                     # Use regular execution for non-streaming tools
+                     result = await self._execute_single_call(call, timeout)
+                     await queue.put(result)
+
+         except asyncio.CancelledError:
+             # Handle cancellation gracefully
+             now = datetime.now(timezone.utc)
+             result = ToolResult(
+                 tool=call.tool,
+                 result=None,
+                 error="Execution was cancelled",
+                 start_time=now,
+                 end_time=now,
+                 machine=os.uname().nodename,
+                 pid=os.getpid(),
+             )
+             await queue.put(result)
+
+         except Exception as e:
+             # Handle other errors
+             now = datetime.now(timezone.utc)
+             result = ToolResult(
+                 tool=call.tool,
+                 result=None,
+                 error=f"Error setting up execution: {e}",
+                 start_time=now,
+                 end_time=now,
+                 machine=os.uname().nodename,
+                 pid=os.getpid(),
+             )
+             await queue.put(result)
+
+     async def _stream_with_timeout(
+         self,
+         tool: Any,
+         call: ToolCall,
+         queue: asyncio.Queue,
+         timeout: Optional[float]
+     ) -> None:
+         """
+         Stream results from a streaming tool with timeout support.
+
+         This method accesses the tool's stream_execute method directly
+         and puts each yielded result into the queue.
+
+         Args:
+             tool: The tool instance
+             call: Tool call data
+             queue: Queue to put results into
+             timeout: Optional timeout in seconds
+         """
+         start_time = datetime.now(timezone.utc)
+         machine = os.uname().nodename
+         pid = os.getpid()
+
+         # Define the streaming task
+         async def streamer():
+             try:
+                 async for result in tool.stream_execute(**call.arguments):
+                     # Create a ToolResult for each streamed item
+                     now = datetime.now(timezone.utc)
+                     tool_result = ToolResult(
+                         tool=call.tool,
+                         result=result,
+                         error=None,
+                         start_time=start_time,
+                         end_time=now,
+                         machine=machine,
+                         pid=pid,
+                     )
+                     await queue.put(tool_result)
+             except Exception as e:
+                 # Handle errors during streaming
+                 now = datetime.now(timezone.utc)
+                 error_result = ToolResult(
+                     tool=call.tool,
+                     result=None,
+                     error=f"Streaming error: {str(e)}",
+                     start_time=start_time,
+                     end_time=now,
+                     machine=machine,
+                     pid=pid,
+                 )
+                 await queue.put(error_result)
+
+         try:
+             # Execute with timeout if specified
+             if timeout:
+                 await asyncio.wait_for(streamer(), timeout)
+             else:
+                 await streamer()
+
+         except asyncio.TimeoutError:
+             # Handle timeout
+             now = datetime.now(timezone.utc)
+             timeout_result = ToolResult(
+                 tool=call.tool,
+                 result=None,
+                 error=f"Streaming timeout after {timeout}s",
+                 start_time=start_time,
+                 end_time=now,
+                 machine=machine,
+                 pid=pid,
+             )
+             await queue.put(timeout_result)
+
+         except Exception as e:
+             # Handle other errors
+             now = datetime.now(timezone.utc)
+             error_result = ToolResult(
+                 tool=call.tool,
+                 result=None,
+                 error=f"Error during streaming: {str(e)}",
+                 start_time=start_time,
+                 end_time=now,
+                 machine=machine,
+                 pid=pid,
+             )
+             await queue.put(error_result)
+
+     async def _execute_to_queue(
+         self,
+         call: ToolCall,
+         queue: asyncio.Queue,
+         timeout: Optional[float],
+     ) -> None:
+         """Execute a single call and put the result in the queue."""
+         # Skip if call is being handled directly by the executor
+         if call.id in self._direct_streaming_calls:
+             return
+
+         result = await self._execute_single_call(call, timeout)
+         await queue.put(result)
+
      # ------------------------------------------------------------------ #
      async def _execute_single_call(
          self,
          call: ToolCall,
-         timeout: float | None,
+         timeout: Optional[float],
      ) -> ToolResult:
+         """
+         Execute a single tool call.
+
+         The entire invocation – including argument validation – is wrapped
+         by the semaphore to honour *max_concurrency*.
+
+         Args:
+             call: Tool call to execute
+             timeout: Optional timeout in seconds
+
+         Returns:
+             Tool execution result
+         """
          pid = os.getpid()
          machine = os.uname().nodename
          start = datetime.now(timezone.utc)
-
-         impl = self.registry.get_tool(call.tool)
-         if impl is None:
+
+         # Early exit if shutting down
+         if self._shutting_down:
              return ToolResult(
                  tool=call.tool,
                  result=None,
-                 error="Tool not found",
+                 error="System is shutting down",
                  start_time=start,
                  end_time=datetime.now(timezone.utc),
                  machine=machine,
@@ -76,87 +402,178 @@ class InProcessStrategy(ExecutionStrategy):
              )

          try:
-             run = self._run_with_timeout
-             if self._sem is None:
-                 return await run(impl, call, timeout, start, machine, pid)
-             async with self._sem:
-                 return await run(impl, call, timeout, start, machine, pid)
-         except Exception as exc:  # pragma: no cover – safety net
-             logger.exception("Unexpected error while executing %s", call.tool)
+             # Get the tool implementation
+             impl = await self.registry.get_tool(call.tool, call.namespace)
+             if impl is None:
+                 return ToolResult(
+                     tool=call.tool,
+                     result=None,
+                     error=f"Tool '{call.tool}' not found",
+                     start_time=start,
+                     end_time=datetime.now(timezone.utc),
+                     machine=machine,
+                     pid=pid,
+                 )
+
+             # Instantiate if class
+             tool = impl() if inspect.isclass(impl) else impl
+
+             # Use semaphore if available
+             guard = self._sem if self._sem is not None else _noop_cm()
+
+             try:
+                 async with guard:
+                     return await self._run_with_timeout(
+                         tool, call, timeout, start, machine, pid
+                     )
+             except Exception as exc:
+                 logger.exception("Unexpected error while executing %s", call.tool)
+                 return ToolResult(
+                     tool=call.tool,
+                     result=None,
+                     error=f"Unexpected error: {exc}",
+                     start_time=start,
+                     end_time=datetime.now(timezone.utc),
+                     machine=machine,
+                     pid=pid,
+                 )
+         except asyncio.CancelledError:
+             # Handle cancellation gracefully
              return ToolResult(
                  tool=call.tool,
                  result=None,
-                 error=f"Unexpected error: {exc}",
+                 error="Execution was cancelled",
+                 start_time=start,
+                 end_time=datetime.now(timezone.utc),
+                 machine=machine,
+                 pid=pid,
+             )
+         except Exception as exc:
+             logger.exception("Error setting up execution for %s", call.tool)
+             return ToolResult(
+                 tool=call.tool,
+                 result=None,
+                 error=f"Setup error: {exc}",
                  start_time=start,
                  end_time=datetime.now(timezone.utc),
                  machine=machine,
                  pid=pid,
              )

-     # ------------------------------------------------------------------ #
-     # core execution with timeout
-     # ------------------------------------------------------------------ #
      async def _run_with_timeout(
          self,
-         impl: Any,
+         tool: Any,
          call: ToolCall,
          timeout: float | None,
          start: datetime,
          machine: str,
          pid: int,
      ) -> ToolResult:
-         tool = impl() if isinstance(impl, type) else impl
-
-         # ------------------------------------------------------------------
-         # Entry-point selection order:
-         #   1. `_aexecute` (async special case)
-         #   2. `execute`   (public wrapper WITH validation & defaults)
-         #   3. `_execute`  (fallback / legacy)
-         # ------------------------------------------------------------------
-         if hasattr(tool, "_aexecute") and inspect.iscoroutinefunction(tool._aexecute):
+         """
+         Resolve the correct async entry-point and invoke it with an optional
+         timeout.
+
+         Args:
+             tool: Tool instance
+             call: Tool call data
+             timeout: Optional timeout in seconds
+             start: Start time for the execution
+             machine: Machine name
+             pid: Process ID
+
+         Returns:
+             Tool execution result
+         """
+         if hasattr(tool, "_aexecute") and inspect.iscoroutinefunction(
+             getattr(type(tool), "_aexecute", None)
+         ):
              fn = tool._aexecute
-             is_async = True
-         elif hasattr(tool, "execute"):
+         elif hasattr(tool, "execute") and inspect.iscoroutinefunction(
+             getattr(tool, "execute", None)
+         ):
              fn = tool.execute
-             is_async = inspect.iscoroutinefunction(fn)
-         elif hasattr(tool, "_execute"):
-             fn = tool._execute
-             is_async = inspect.iscoroutinefunction(fn)
          else:
-             raise ToolExecutionError(
-                 f"Tool '{call.tool}' must implement _execute, execute or _aexecute"
-             )
-
-         async def _invoke():
-             if is_async:
-                 return await fn(**call.arguments)
-             loop = asyncio.get_running_loop()
-             return await loop.run_in_executor(None, lambda: fn(**call.arguments))
-
-         try:
-             result_val = (
-                 await asyncio.wait_for(_invoke(), timeout) if timeout else await _invoke()
-             )
              return ToolResult(
                  tool=call.tool,
-                 result=result_val,
-                 error=None,
+                 result=None,
+                 error=(
+                     "Tool must implement *async* '_aexecute' or 'execute'. "
+                     "Synchronous entry-points are not supported."
+                 ),
                  start_time=start,
                  end_time=datetime.now(timezone.utc),
                  machine=machine,
                  pid=pid,
              )
-         except asyncio.TimeoutError:
+
+         try:
+             if timeout:
+                 # Use a task with explicit cancellation
+                 task = asyncio.create_task(fn(**call.arguments))
+
+                 try:
+                     # Wait for the task with timeout
+                     result_val = await asyncio.wait_for(task, timeout)
+
+                     return ToolResult(
+                         tool=call.tool,
+                         result=result_val,
+                         error=None,
+                         start_time=start,
+                         end_time=datetime.now(timezone.utc),
+                         machine=machine,
+                         pid=pid,
+                     )
+                 except asyncio.TimeoutError:
+                     # Cancel the task if it times out
+                     if not task.done():
+                         task.cancel()
+
+                         # Wait for cancellation to complete
+                         try:
+                             await task
+                         except asyncio.CancelledError:
+                             # Expected - we just cancelled it
+                             pass
+                         except Exception:
+                             # Ignore any other exceptions during cancellation
+                             pass
+
+                     # Return a timeout error
+                     return ToolResult(
+                         tool=call.tool,
+                         result=None,
+                         error=f"Timeout after {timeout}s",
+                         start_time=start,
+                         end_time=datetime.now(timezone.utc),
+                         machine=machine,
+                         pid=pid,
+                     )
+             else:
+                 # No timeout
+                 result_val = await fn(**call.arguments)
+                 return ToolResult(
+                     tool=call.tool,
+                     result=result_val,
+                     error=None,
+                     start_time=start,
+                     end_time=datetime.now(timezone.utc),
+                     machine=machine,
+                     pid=pid,
+                 )
+         except asyncio.CancelledError:
+             # Handle cancellation explicitly
              return ToolResult(
                  tool=call.tool,
                  result=None,
-                 error=f"Timeout after {timeout}s",
+                 error="Execution was cancelled",
                  start_time=start,
                  end_time=datetime.now(timezone.utc),
                  machine=machine,
                  pid=pid,
              )
          except Exception as exc:
+             logger.exception("Error executing %s: %s", call.tool, exc)
              return ToolResult(
                  tool=call.tool,
                  result=None,
@@ -166,3 +583,30 @@ class InProcessStrategy(ExecutionStrategy):
                  machine=machine,
                  pid=pid,
              )
+
+     @property
+     def supports_streaming(self) -> bool:
+         """Check if this strategy supports streaming execution."""
+         return True
+
+     async def shutdown(self) -> None:
+         """
+         Gracefully shut down all active executions.
+
+         This cancels all active tasks and waits for them to complete.
+         """
+         if self._shutting_down:
+             return
+
+         self._shutting_down = True
+         self._shutdown_event.set()
+
+         # Cancel all active tasks
+         active_tasks = list(self._active_tasks)
+         if active_tasks:
+             logger.info(f"Cancelling {len(active_tasks)} active tool executions")
+             for task in active_tasks:
+                 task.cancel()
+
+             # Wait for all tasks to complete (with cancellation)
+             await asyncio.gather(*active_tasks, return_exceptions=True)
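
For orientation, the sketch below shows how the reworked strategy might be driven once a registry is available. It is a minimal, hypothetical example based only on the signatures visible in this diff (the InProcessStrategy constructor, run, stream_run, shutdown, and the ToolResult fields); the registry argument, the "echo" tool name, its arguments, and the assumption that a ToolCall can be built from just a tool name and arguments are illustrative, not taken from the package.

import asyncio

from chuk_tool_processor.execution.strategies.inprocess_strategy import InProcessStrategy
from chuk_tool_processor.models.tool_call import ToolCall


async def main(registry) -> None:
    # Cap concurrency at 4 and give every call a 5 s default timeout.
    strategy = InProcessStrategy(registry, default_timeout=5.0, max_concurrency=4)

    # Hypothetical call; ToolCall field defaults are assumed, not confirmed by this diff.
    calls = [ToolCall(tool="echo", arguments={"message": "hi"})]

    # Batch execution: one ToolResult per call, in call order.
    for result in await strategy.run(calls):
        print(result.tool, result.result, result.error)

    # Streaming execution: results (including per-item results from tools that
    # expose stream_execute) are yielded as soon as they land on the queue.
    async for result in strategy.stream_run(calls):
        print(result.tool, result.result, result.error)

    # Cancel anything still in flight before exiting.
    await strategy.shutdown()

# asyncio.run(main(my_registry))  # run with a concrete ToolRegistryInterface implementation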