chuk-tool-processor 0.6.4__py3-none-any.whl → 0.9.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chuk-tool-processor might be problematic.

Files changed (66)
  1. chuk_tool_processor/core/__init__.py +32 -1
  2. chuk_tool_processor/core/exceptions.py +225 -13
  3. chuk_tool_processor/core/processor.py +135 -104
  4. chuk_tool_processor/execution/strategies/__init__.py +6 -0
  5. chuk_tool_processor/execution/strategies/inprocess_strategy.py +142 -150
  6. chuk_tool_processor/execution/strategies/subprocess_strategy.py +202 -206
  7. chuk_tool_processor/execution/tool_executor.py +82 -84
  8. chuk_tool_processor/execution/wrappers/__init__.py +42 -0
  9. chuk_tool_processor/execution/wrappers/caching.py +150 -116
  10. chuk_tool_processor/execution/wrappers/circuit_breaker.py +370 -0
  11. chuk_tool_processor/execution/wrappers/rate_limiting.py +76 -43
  12. chuk_tool_processor/execution/wrappers/retry.py +116 -78
  13. chuk_tool_processor/logging/__init__.py +23 -17
  14. chuk_tool_processor/logging/context.py +40 -45
  15. chuk_tool_processor/logging/formatter.py +22 -21
  16. chuk_tool_processor/logging/helpers.py +28 -42
  17. chuk_tool_processor/logging/metrics.py +13 -15
  18. chuk_tool_processor/mcp/__init__.py +8 -12
  19. chuk_tool_processor/mcp/mcp_tool.py +158 -114
  20. chuk_tool_processor/mcp/register_mcp_tools.py +22 -22
  21. chuk_tool_processor/mcp/setup_mcp_http_streamable.py +57 -17
  22. chuk_tool_processor/mcp/setup_mcp_sse.py +57 -17
  23. chuk_tool_processor/mcp/setup_mcp_stdio.py +11 -11
  24. chuk_tool_processor/mcp/stream_manager.py +333 -276
  25. chuk_tool_processor/mcp/transport/__init__.py +22 -29
  26. chuk_tool_processor/mcp/transport/base_transport.py +180 -44
  27. chuk_tool_processor/mcp/transport/http_streamable_transport.py +505 -325
  28. chuk_tool_processor/mcp/transport/models.py +100 -0
  29. chuk_tool_processor/mcp/transport/sse_transport.py +607 -276
  30. chuk_tool_processor/mcp/transport/stdio_transport.py +597 -116
  31. chuk_tool_processor/models/__init__.py +21 -1
  32. chuk_tool_processor/models/execution_strategy.py +16 -21
  33. chuk_tool_processor/models/streaming_tool.py +28 -25
  34. chuk_tool_processor/models/tool_call.py +49 -31
  35. chuk_tool_processor/models/tool_export_mixin.py +22 -8
  36. chuk_tool_processor/models/tool_result.py +40 -77
  37. chuk_tool_processor/models/tool_spec.py +350 -0
  38. chuk_tool_processor/models/validated_tool.py +36 -18
  39. chuk_tool_processor/observability/__init__.py +30 -0
  40. chuk_tool_processor/observability/metrics.py +312 -0
  41. chuk_tool_processor/observability/setup.py +105 -0
  42. chuk_tool_processor/observability/tracing.py +345 -0
  43. chuk_tool_processor/plugins/__init__.py +1 -1
  44. chuk_tool_processor/plugins/discovery.py +11 -11
  45. chuk_tool_processor/plugins/parsers/__init__.py +1 -1
  46. chuk_tool_processor/plugins/parsers/base.py +1 -2
  47. chuk_tool_processor/plugins/parsers/function_call_tool.py +13 -8
  48. chuk_tool_processor/plugins/parsers/json_tool.py +4 -3
  49. chuk_tool_processor/plugins/parsers/openai_tool.py +12 -7
  50. chuk_tool_processor/plugins/parsers/xml_tool.py +4 -4
  51. chuk_tool_processor/registry/__init__.py +12 -12
  52. chuk_tool_processor/registry/auto_register.py +22 -30
  53. chuk_tool_processor/registry/decorators.py +127 -129
  54. chuk_tool_processor/registry/interface.py +26 -23
  55. chuk_tool_processor/registry/metadata.py +27 -22
  56. chuk_tool_processor/registry/provider.py +17 -18
  57. chuk_tool_processor/registry/providers/__init__.py +16 -19
  58. chuk_tool_processor/registry/providers/memory.py +18 -25
  59. chuk_tool_processor/registry/tool_export.py +42 -51
  60. chuk_tool_processor/utils/validation.py +15 -16
  61. chuk_tool_processor-0.9.7.dist-info/METADATA +1813 -0
  62. chuk_tool_processor-0.9.7.dist-info/RECORD +67 -0
  63. chuk_tool_processor-0.6.4.dist-info/METADATA +0 -697
  64. chuk_tool_processor-0.6.4.dist-info/RECORD +0 -60
  65. {chuk_tool_processor-0.6.4.dist-info → chuk_tool_processor-0.9.7.dist-info}/WHEEL +0 -0
  66. {chuk_tool_processor-0.6.4.dist-info → chuk_tool_processor-0.9.7.dist-info}/top_level.txt +0 -0
@@ -7,23 +7,28 @@ This module provides the central ToolProcessor class which handles:
7
7
  - Tool execution using configurable strategies
8
8
  - Application of execution wrappers (caching, retries, etc.)
9
9
  """
10
+
10
11
  from __future__ import annotations
11
12
 
12
13
  import asyncio
13
- import time
14
- import json
15
14
  import hashlib
16
- from typing import Any, Dict, List, Optional, Type, Union, Set
15
+ import json
16
+ import time
17
+ from typing import Any
17
18
 
19
+ from chuk_tool_processor.execution.strategies.inprocess_strategy import InProcessStrategy
20
+ from chuk_tool_processor.execution.wrappers.caching import CachingToolExecutor, InMemoryCache
21
+ from chuk_tool_processor.execution.wrappers.circuit_breaker import (
22
+ CircuitBreakerConfig,
23
+ CircuitBreakerExecutor,
24
+ )
25
+ from chuk_tool_processor.execution.wrappers.rate_limiting import RateLimitedToolExecutor, RateLimiter
26
+ from chuk_tool_processor.execution.wrappers.retry import RetryableToolExecutor, RetryConfig
27
+ from chuk_tool_processor.logging import get_logger, log_context_span, log_tool_call, metrics, request_logging
18
28
  from chuk_tool_processor.models.tool_call import ToolCall
19
29
  from chuk_tool_processor.models.tool_result import ToolResult
30
+ from chuk_tool_processor.plugins.discovery import discover_default_plugins, plugin_registry
20
31
  from chuk_tool_processor.registry import ToolRegistryInterface, ToolRegistryProvider
21
- from chuk_tool_processor.execution.strategies.inprocess_strategy import InProcessStrategy
22
- from chuk_tool_processor.execution.wrappers.caching import CacheInterface, InMemoryCache, CachingToolExecutor
23
- from chuk_tool_processor.execution.wrappers.rate_limiting import RateLimiter, RateLimitedToolExecutor
24
- from chuk_tool_processor.execution.wrappers.retry import RetryConfig, RetryableToolExecutor
25
- from chuk_tool_processor.plugins.discovery import plugin_registry, discover_default_plugins
26
- from chuk_tool_processor.logging import get_logger, log_context_span, request_logging, log_tool_call, metrics
27
32
 
28
33
 
29
34
  class ToolProcessor:
@@ -34,18 +39,22 @@ class ToolProcessor:
34
39
 
35
40
  def __init__(
36
41
  self,
37
- registry: Optional[ToolRegistryInterface] = None,
38
- strategy = None,
42
+ registry: ToolRegistryInterface | None = None,
43
+ strategy=None,
39
44
  default_timeout: float = 10.0,
40
- max_concurrency: Optional[int] = None,
45
+ max_concurrency: int | None = None,
41
46
  enable_caching: bool = True,
42
47
  cache_ttl: int = 300,
43
48
  enable_rate_limiting: bool = False,
44
- global_rate_limit: Optional[int] = None,
45
- tool_rate_limits: Optional[Dict[str, tuple]] = None,
49
+ global_rate_limit: int | None = None,
50
+ tool_rate_limits: dict[str, tuple] | None = None,
46
51
  enable_retries: bool = True,
47
52
  max_retries: int = 3,
48
- parser_plugins: Optional[List[str]] = None,
53
+ retry_config: RetryConfig | None = None,
54
+ enable_circuit_breaker: bool = False,
55
+ circuit_breaker_threshold: int = 5,
56
+ circuit_breaker_timeout: float = 60.0,
57
+ parser_plugins: list[str] | None = None,
49
58
  ):
50
59
  """
51
60
  Initialize the tool processor.
@@ -62,11 +71,14 @@ class ToolProcessor:
62
71
  tool_rate_limits: Dict mapping tool names to (limit, period) tuples.
63
72
  enable_retries: Whether to enable automatic retries.
64
73
  max_retries: Maximum number of retry attempts.
74
+ enable_circuit_breaker: Whether to enable circuit breaker pattern.
75
+ circuit_breaker_threshold: Number of failures before opening circuit.
76
+ circuit_breaker_timeout: Seconds to wait before testing recovery.
65
77
  parser_plugins: List of parser plugin names to use.
66
78
  If None, uses all available parsers.
67
79
  """
68
80
  self.logger = get_logger("chuk_tool_processor.processor")
69
-
81
+
70
82
  # Store initialization parameters for lazy initialization
71
83
  self._registry = registry
72
84
  self._strategy = strategy
@@ -79,14 +91,18 @@ class ToolProcessor:
79
91
  self.tool_rate_limits = tool_rate_limits
80
92
  self.enable_retries = enable_retries
81
93
  self.max_retries = max_retries
94
+ self.retry_config = retry_config
95
+ self.enable_circuit_breaker = enable_circuit_breaker
96
+ self.circuit_breaker_threshold = circuit_breaker_threshold
97
+ self.circuit_breaker_timeout = circuit_breaker_timeout
82
98
  self.parser_plugin_names = parser_plugins
83
-
99
+
84
100
  # Placeholder for initialized components
85
101
  self.registry = None
86
102
  self.strategy = None
87
103
  self.executor = None
88
104
  self.parsers = []
89
-
105
+
90
106
  # Flag for tracking initialization state
91
107
  self._initialized = False
92
108
  self._init_lock = asyncio.Lock()
@@ -94,28 +110,28 @@ class ToolProcessor:
94
110
  async def initialize(self) -> None:
95
111
  """
96
112
  Initialize the processor asynchronously.
97
-
113
+
98
114
  This method ensures all components are properly initialized before use.
99
115
  It is called automatically by other methods if needed.
100
116
  """
101
117
  # Fast path if already initialized
102
118
  if self._initialized:
103
119
  return
104
-
120
+
105
121
  # Ensure only one initialization happens at a time
106
122
  async with self._init_lock:
107
123
  # Double-check pattern after acquiring lock
108
124
  if self._initialized:
109
125
  return
110
-
126
+
111
127
  self.logger.debug("Initializing tool processor")
112
-
128
+
113
129
  # Get the registry
114
130
  if self._registry is not None:
115
131
  self.registry = self._registry
116
132
  else:
117
133
  self.registry = await ToolRegistryProvider.get_registry()
118
-
134
+
119
135
  # Create execution strategy if needed
120
136
  if self._strategy is not None:
121
137
  self.strategy = self._strategy
@@ -125,18 +141,32 @@ class ToolProcessor:
125
141
  default_timeout=self.default_timeout,
126
142
  max_concurrency=self.max_concurrency,
127
143
  )
128
-
144
+
129
145
  # Set up the executor chain with optional wrappers
130
146
  executor = self.strategy
131
-
147
+
132
148
  # Apply wrappers in reverse order (innermost first)
149
+ # Circuit breaker goes innermost (closest to actual execution)
150
+ if self.enable_circuit_breaker:
151
+ self.logger.debug("Enabling circuit breaker")
152
+ circuit_config = CircuitBreakerConfig(
153
+ failure_threshold=self.circuit_breaker_threshold,
154
+ reset_timeout=self.circuit_breaker_timeout,
155
+ )
156
+ executor = CircuitBreakerExecutor(
157
+ executor=executor,
158
+ default_config=circuit_config,
159
+ )
160
+
133
161
  if self.enable_retries:
134
162
  self.logger.debug("Enabling retry logic")
163
+ # Use custom retry config if provided, otherwise create default
164
+ retry_cfg = self.retry_config or RetryConfig(max_retries=self.max_retries)
135
165
  executor = RetryableToolExecutor(
136
166
  executor=executor,
137
- default_config=RetryConfig(max_retries=self.max_retries),
167
+ default_config=retry_cfg,
138
168
  )
139
-
169
+
140
170
  if self.enable_rate_limiting:
141
171
  self.logger.debug("Enabling rate limiting")
142
172
  rate_limiter = RateLimiter(
@@ -147,7 +177,7 @@ class ToolProcessor:
147
177
  executor=executor,
148
178
  limiter=rate_limiter,
149
179
  )
150
-
180
+
151
181
  if self.enable_caching:
152
182
  self.logger.debug("Enabling result caching")
153
183
  cache = InMemoryCache(default_ttl=self.cache_ttl)
@@ -156,16 +186,16 @@ class ToolProcessor:
156
186
  cache=cache,
157
187
  default_ttl=self.cache_ttl,
158
188
  )
159
-
189
+
160
190
  self.executor = executor
161
-
191
+
162
192
  # Initialize parser plugins
163
193
  # Discover plugins if not already done
164
194
  plugins = plugin_registry.list_plugins().get("parser", [])
165
195
  if not plugins:
166
196
  discover_default_plugins()
167
197
  plugins = plugin_registry.list_plugins().get("parser", [])
168
-
198
+
169
199
  # Get parser plugins
170
200
  if self.parser_plugin_names:
171
201
  self.parsers = [
@@ -174,42 +204,40 @@ class ToolProcessor:
174
204
  if plugin_registry.get_plugin("parser", name)
175
205
  ]
176
206
  else:
177
- self.parsers = [
178
- plugin_registry.get_plugin("parser", name) for name in plugins
179
- ]
180
-
207
+ self.parsers = [plugin_registry.get_plugin("parser", name) for name in plugins]
208
+
181
209
  self.logger.debug(f"Initialized with {len(self.parsers)} parser plugins")
182
210
  self._initialized = True
183
211
 
184
212
  async def process(
185
213
  self,
186
- data: Union[str, Dict[str, Any], List[Dict[str, Any]]],
187
- timeout: Optional[float] = None,
188
- use_cache: bool = True,
189
- request_id: Optional[str] = None,
190
- ) -> List[ToolResult]:
214
+ data: str | dict[str, Any] | list[dict[str, Any]],
215
+ timeout: float | None = None,
216
+ use_cache: bool = True, # noqa: ARG002
217
+ request_id: str | None = None,
218
+ ) -> list[ToolResult]:
191
219
  """
192
220
  Process tool calls from various input formats.
193
-
221
+
194
222
  This method handles different input types:
195
223
  - String: Parses tool calls from text using registered parsers
196
224
  - Dict: Processes an OpenAI-style tool_calls object
197
225
  - List[Dict]: Processes a list of individual tool calls
198
-
226
+
199
227
  Args:
200
228
  data: Input data containing tool calls
201
229
  timeout: Optional timeout for execution
202
230
  use_cache: Whether to use cached results
203
231
  request_id: Optional request ID for logging
204
-
232
+
205
233
  Returns:
206
234
  List of tool results
207
235
  """
208
236
  # Ensure initialization
209
237
  await self.initialize()
210
-
238
+
211
239
  # Create request context
212
- async with request_logging(request_id) as req_id:
240
+ async with request_logging(request_id):
213
241
  # Handle different input types
214
242
  if isinstance(data, str):
215
243
  # Text processing
@@ -224,13 +252,13 @@ class ToolProcessor:
224
252
  function = tc["function"]
225
253
  name = function.get("name")
226
254
  args_str = function.get("arguments", "{}")
227
-
255
+
228
256
  # Parse arguments
229
257
  try:
230
258
  args = json.loads(args_str) if isinstance(args_str, str) else args_str
231
259
  except json.JSONDecodeError:
232
260
  args = {"raw": args_str}
233
-
261
+
234
262
  if name:
235
263
  calls.append(ToolCall(tool=name, arguments=args, id=tc.get("id")))
236
264
  else:
@@ -242,32 +270,34 @@ class ToolProcessor:
242
270
  else:
243
271
  self.logger.warning(f"Unsupported input type: {type(data)}")
244
272
  return []
245
-
273
+
246
274
  if not calls:
247
275
  self.logger.debug("No tool calls found")
248
276
  return []
249
-
277
+
250
278
  self.logger.debug(f"Found {len(calls)} tool calls")
251
-
279
+
252
280
  # Execute tool calls
253
281
  async with log_context_span("tool_execution", {"num_calls": len(calls)}):
254
- # Check if any tools are unknown
282
+ # Check if any tools are unknown - search across all namespaces
255
283
  unknown_tools = []
284
+ all_tools = await self.registry.list_tools() # Returns list of (namespace, name) tuples
285
+ tool_names_in_registry = {name for ns, name in all_tools}
286
+
256
287
  for call in calls:
257
- tool = await self.registry.get_tool(call.tool)
258
- if not tool:
288
+ if call.tool not in tool_names_in_registry:
259
289
  unknown_tools.append(call.tool)
260
-
290
+
261
291
  if unknown_tools:
262
- self.logger.warning(f"Unknown tools: {unknown_tools}")
263
-
292
+ self.logger.debug(f"Unknown tools: {unknown_tools}")
293
+
264
294
  # Execute tools
265
295
  results = await self.executor.execute(calls, timeout=timeout)
266
-
296
+
267
297
  # Log metrics for each tool call
268
- for call, result in zip(calls, results):
298
+ for call, result in zip(calls, results, strict=False):
269
299
  await log_tool_call(call, result)
270
-
300
+
271
301
  # Record metrics
272
302
  duration = (result.end_time - result.start_time).total_seconds()
273
303
  await metrics.log_tool_execution(
@@ -278,19 +308,19 @@ class ToolProcessor:
278
308
  cached=getattr(result, "cached", False),
279
309
  attempts=getattr(result, "attempts", 1),
280
310
  )
281
-
311
+
282
312
  return results
283
313
 
284
314
  async def process_text(
285
315
  self,
286
316
  text: str,
287
- timeout: Optional[float] = None,
317
+ timeout: float | None = None,
288
318
  use_cache: bool = True,
289
- request_id: Optional[str] = None,
290
- ) -> List[ToolResult]:
319
+ request_id: str | None = None,
320
+ ) -> list[ToolResult]:
291
321
  """
292
322
  Process text to extract and execute tool calls.
293
-
323
+
294
324
  Legacy alias for process() with string input.
295
325
 
296
326
  Args:
@@ -308,35 +338,33 @@ class ToolProcessor:
308
338
  use_cache=use_cache,
309
339
  request_id=request_id,
310
340
  )
311
-
341
+
312
342
  async def execute(
313
343
  self,
314
- calls: List[ToolCall],
315
- timeout: Optional[float] = None,
344
+ calls: list[ToolCall],
345
+ timeout: float | None = None,
316
346
  use_cache: bool = True,
317
- ) -> List[ToolResult]:
347
+ ) -> list[ToolResult]:
318
348
  """
319
349
  Execute a list of ToolCall objects directly.
320
-
350
+
321
351
  Args:
322
352
  calls: List of tool calls to execute
323
353
  timeout: Optional execution timeout
324
354
  use_cache: Whether to use cached results
325
-
355
+
326
356
  Returns:
327
357
  List of tool results
328
358
  """
329
359
  # Ensure initialization
330
360
  await self.initialize()
331
-
361
+
332
362
  # Execute with the configured executor
333
363
  return await self.executor.execute(
334
- calls=calls,
335
- timeout=timeout,
336
- use_cache=use_cache if hasattr(self.executor, "use_cache") else True
364
+ calls=calls, timeout=timeout, use_cache=use_cache if hasattr(self.executor, "use_cache") else True
337
365
  )
338
366
 
339
- async def _extract_tool_calls(self, text: str) -> List[ToolCall]:
367
+ async def _extract_tool_calls(self, text: str) -> list[ToolCall]:
340
368
  """
341
369
  Extract tool calls from text using all available parsers.
342
370
 
@@ -346,19 +374,19 @@ class ToolProcessor:
346
374
  Returns:
347
375
  List of tool calls.
348
376
  """
349
- all_calls: List[ToolCall] = []
377
+ all_calls: list[ToolCall] = []
350
378
 
351
379
  # Try each parser
352
380
  async with log_context_span("parsing", {"text_length": len(text)}):
353
381
  parse_tasks = []
354
-
382
+
355
383
  # Create parsing tasks
356
384
  for parser in self.parsers:
357
385
  parse_tasks.append(self._try_parser(parser, text))
358
-
386
+
359
387
  # Execute all parsers concurrently
360
388
  parser_results = await asyncio.gather(*parse_tasks, return_exceptions=True)
361
-
389
+
362
390
  # Collect successful results
363
391
  for result in parser_results:
364
392
  if isinstance(result, Exception):
@@ -370,29 +398,29 @@ class ToolProcessor:
370
398
  # Remove duplicates - use a stable digest instead of hashing a
371
399
  # frozenset of argument items (which breaks on unhashable types).
372
400
  # ------------------------------------------------------------------ #
373
- def _args_digest(args: Dict[str, Any]) -> str:
401
+ def _args_digest(args: dict[str, Any]) -> str:
374
402
  """Return a stable hash for any JSON-serialisable payload."""
375
403
  blob = json.dumps(args, sort_keys=True, default=str)
376
- return hashlib.md5(blob.encode()).hexdigest()
404
+ return hashlib.md5(blob.encode(), usedforsecurity=False).hexdigest() # nosec B324
377
405
 
378
- unique_calls: Dict[str, ToolCall] = {}
406
+ unique_calls: dict[str, ToolCall] = {}
379
407
  for call in all_calls:
380
408
  key = f"{call.tool}:{_args_digest(call.arguments)}"
381
409
  unique_calls[key] = call
382
410
 
383
411
  return list(unique_calls.values())
384
-
385
- async def _try_parser(self, parser, text: str) -> List[ToolCall]:
412
+
413
+ async def _try_parser(self, parser, text: str) -> list[ToolCall]:
386
414
  """Try a single parser with metrics and logging."""
387
415
  parser_name = parser.__class__.__name__
388
-
416
+
389
417
  async with log_context_span(f"parser.{parser_name}", log_duration=True):
390
418
  start_time = time.time()
391
-
419
+
392
420
  try:
393
421
  # Try to parse
394
422
  calls = await parser.try_parse(text)
395
-
423
+
396
424
  # Log success
397
425
  duration = time.time() - start_time
398
426
  await metrics.log_parser_metric(
@@ -401,9 +429,9 @@ class ToolProcessor:
401
429
  duration=duration,
402
430
  num_calls=len(calls),
403
431
  )
404
-
432
+
405
433
  return calls
406
-
434
+
407
435
  except Exception as e:
408
436
  # Log failure
409
437
  duration = time.time() - start_time
@@ -413,41 +441,43 @@ class ToolProcessor:
413
441
  duration=duration,
414
442
  num_calls=0,
415
443
  )
416
- self.logger.error(f"Parser {parser_name} failed: {str(e)}")
444
+ self.logger.debug(f"Parser {parser_name} failed: {str(e)}")
417
445
  return []
418
446
 
419
447
 
420
448
  # Create a global processor instance
421
- _global_processor: Optional[ToolProcessor] = None
449
+ _global_processor: ToolProcessor | None = None
422
450
  _processor_lock = asyncio.Lock()
423
451
 
452
+
424
453
  async def get_default_processor() -> ToolProcessor:
425
454
  """Get or initialize the default global processor."""
426
455
  global _global_processor
427
-
456
+
428
457
  if _global_processor is None:
429
458
  async with _processor_lock:
430
459
  if _global_processor is None:
431
460
  _global_processor = ToolProcessor()
432
461
  await _global_processor.initialize()
433
-
462
+
434
463
  return _global_processor
435
464
 
465
+
436
466
  async def process(
437
- data: Union[str, Dict[str, Any], List[Dict[str, Any]]],
438
- timeout: Optional[float] = None,
467
+ data: str | dict[str, Any] | list[dict[str, Any]],
468
+ timeout: float | None = None,
439
469
  use_cache: bool = True,
440
- request_id: Optional[str] = None,
441
- ) -> List[ToolResult]:
470
+ request_id: str | None = None,
471
+ ) -> list[ToolResult]:
442
472
  """
443
473
  Process tool calls with the default processor.
444
-
474
+
445
475
  Args:
446
476
  data: Input data (text, dict, or list of dicts)
447
477
  timeout: Optional timeout for execution
448
478
  use_cache: Whether to use cached results
449
479
  request_id: Optional request ID for logging
450
-
480
+
451
481
  Returns:
452
482
  List of tool results
453
483
  """
@@ -459,15 +489,16 @@ async def process(
459
489
  request_id=request_id,
460
490
  )
461
491
 
492
+
462
493
  async def process_text(
463
494
  text: str,
464
- timeout: Optional[float] = None,
495
+ timeout: float | None = None,
465
496
  use_cache: bool = True,
466
- request_id: Optional[str] = None,
467
- ) -> List[ToolResult]:
497
+ request_id: str | None = None,
498
+ ) -> list[ToolResult]:
468
499
  """
469
500
  Process text with the default processor.
470
-
501
+
471
502
  Legacy alias for backward compatibility.
472
503
 
473
504
  Args:
@@ -485,4 +516,4 @@ async def process_text(
485
516
  timeout=timeout,
486
517
  use_cache=use_cache,
487
518
  request_id=request_id,
488
- )
519
+ )
@@ -0,0 +1,6 @@
1
+ """Execution strategies for tool processing."""
2
+
3
+ from chuk_tool_processor.execution.strategies.inprocess_strategy import InProcessStrategy
4
+ from chuk_tool_processor.execution.strategies.subprocess_strategy import SubprocessStrategy
5
+
6
+ __all__ = ["InProcessStrategy", "SubprocessStrategy"]