chuk-tool-processor 0.7.0__py3-none-any.whl → 0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of chuk-tool-processor might be problematic.
Files changed (39)
  1. chuk_tool_processor/__init__.py +114 -0
  2. chuk_tool_processor/core/__init__.py +31 -0
  3. chuk_tool_processor/core/exceptions.py +218 -12
  4. chuk_tool_processor/core/processor.py +391 -43
  5. chuk_tool_processor/execution/wrappers/__init__.py +42 -0
  6. chuk_tool_processor/execution/wrappers/caching.py +43 -10
  7. chuk_tool_processor/execution/wrappers/circuit_breaker.py +370 -0
  8. chuk_tool_processor/execution/wrappers/rate_limiting.py +31 -1
  9. chuk_tool_processor/execution/wrappers/retry.py +93 -53
  10. chuk_tool_processor/logging/__init__.py +5 -8
  11. chuk_tool_processor/logging/context.py +2 -5
  12. chuk_tool_processor/mcp/__init__.py +3 -0
  13. chuk_tool_processor/mcp/mcp_tool.py +8 -3
  14. chuk_tool_processor/mcp/models.py +87 -0
  15. chuk_tool_processor/mcp/setup_mcp_http_streamable.py +38 -2
  16. chuk_tool_processor/mcp/setup_mcp_sse.py +38 -2
  17. chuk_tool_processor/mcp/setup_mcp_stdio.py +92 -12
  18. chuk_tool_processor/mcp/stream_manager.py +109 -6
  19. chuk_tool_processor/mcp/transport/http_streamable_transport.py +18 -5
  20. chuk_tool_processor/mcp/transport/sse_transport.py +16 -3
  21. chuk_tool_processor/models/__init__.py +20 -0
  22. chuk_tool_processor/models/tool_call.py +34 -1
  23. chuk_tool_processor/models/tool_export_mixin.py +4 -4
  24. chuk_tool_processor/models/tool_spec.py +350 -0
  25. chuk_tool_processor/models/validated_tool.py +22 -2
  26. chuk_tool_processor/observability/__init__.py +30 -0
  27. chuk_tool_processor/observability/metrics.py +312 -0
  28. chuk_tool_processor/observability/setup.py +105 -0
  29. chuk_tool_processor/observability/tracing.py +346 -0
  30. chuk_tool_processor/py.typed +0 -0
  31. chuk_tool_processor/registry/interface.py +7 -7
  32. chuk_tool_processor/registry/providers/__init__.py +2 -1
  33. chuk_tool_processor/registry/tool_export.py +1 -6
  34. chuk_tool_processor-0.10.dist-info/METADATA +2326 -0
  35. chuk_tool_processor-0.10.dist-info/RECORD +69 -0
  36. chuk_tool_processor-0.7.0.dist-info/METADATA +0 -1230
  37. chuk_tool_processor-0.7.0.dist-info/RECORD +0 -61
  38. {chuk_tool_processor-0.7.0.dist-info → chuk_tool_processor-0.10.dist-info}/WHEEL +0 -0
  39. {chuk_tool_processor-0.7.0.dist-info → chuk_tool_processor-0.10.dist-info}/top_level.txt +0 -0
chuk_tool_processor/core/processor.py
@@ -18,6 +18,10 @@ from typing import Any
 
 from chuk_tool_processor.execution.strategies.inprocess_strategy import InProcessStrategy
 from chuk_tool_processor.execution.wrappers.caching import CachingToolExecutor, InMemoryCache
+from chuk_tool_processor.execution.wrappers.circuit_breaker import (
+    CircuitBreakerConfig,
+    CircuitBreakerExecutor,
+)
 from chuk_tool_processor.execution.wrappers.rate_limiting import RateLimitedToolExecutor, RateLimiter
 from chuk_tool_processor.execution.wrappers.retry import RetryableToolExecutor, RetryConfig
 from chuk_tool_processor.logging import get_logger, log_context_span, log_tool_call, metrics, request_logging
@@ -30,13 +34,62 @@ from chuk_tool_processor.registry import ToolRegistryInterface, ToolRegistryProv
 class ToolProcessor:
     """
     Main class for processing tool calls from LLM responses.
-    Combines parsing, execution, and result handling with full async support.
+
+    ToolProcessor combines parsing, execution, and result handling with full async support.
+    It provides production-ready features including timeouts, retries, caching, rate limiting,
+    and circuit breaking.
+
+    Examples:
+        Basic usage with context manager:
+
+        >>> import asyncio
+        >>> from chuk_tool_processor import ToolProcessor, register_tool
+        >>>
+        >>> @register_tool(name="calculator")
+        ... class Calculator:
+        ...     async def execute(self, a: int, b: int) -> dict:
+        ...         return {"result": a + b}
+        >>>
+        >>> async def main():
+        ...     async with ToolProcessor() as processor:
+        ...         llm_output = '<tool name="calculator" args=\'{"a": 5, "b": 3}\'/>'
+        ...         results = await processor.process(llm_output)
+        ...         print(results[0].result)  # {'result': 8}
+        >>>
+        >>> asyncio.run(main())
+
+        Production configuration:
+
+        >>> processor = ToolProcessor(
+        ...     default_timeout=30.0,
+        ...     enable_caching=True,
+        ...     cache_ttl=600,
+        ...     enable_rate_limiting=True,
+        ...     global_rate_limit=100,  # 100 requests/minute
+        ...     enable_retries=True,
+        ...     max_retries=3,
+        ...     enable_circuit_breaker=True,
+        ... )
+
+        Manual cleanup:
+
+        >>> processor = ToolProcessor()
+        >>> try:
+        ...     results = await processor.process(llm_output)
+        ... finally:
+        ...     await processor.close()
+
+    Attributes:
+        registry: Tool registry containing registered tools
+        strategy: Execution strategy (InProcess or Subprocess)
+        executor: Wrapped executor with caching, retries, etc.
+        parsers: List of parser plugins for extracting tool calls
     """
 
     def __init__(
         self,
         registry: ToolRegistryInterface | None = None,
-        strategy=None,
+        strategy: Any | None = None,  # Strategy can be InProcessStrategy or SubprocessStrategy
         default_timeout: float = 10.0,
         max_concurrency: int | None = None,
         enable_caching: bool = True,
@@ -46,6 +99,10 @@ class ToolProcessor:
         tool_rate_limits: dict[str, tuple] | None = None,
         enable_retries: bool = True,
         max_retries: int = 3,
+        retry_config: RetryConfig | None = None,
+        enable_circuit_breaker: bool = False,
+        circuit_breaker_threshold: int = 5,
+        circuit_breaker_timeout: float = 60.0,
         parser_plugins: list[str] | None = None,
     ):
         """
@@ -53,18 +110,60 @@ class ToolProcessor:
 
         Args:
             registry: Tool registry to use. If None, uses the global registry.
-            strategy: Optional execution strategy (default: InProcessStrategy)
+            strategy: Optional execution strategy (default: InProcessStrategy).
+                Use SubprocessStrategy for isolated execution of untrusted code.
             default_timeout: Default timeout for tool execution in seconds.
+                Individual tools can override this. Default: 10.0
             max_concurrency: Maximum number of concurrent tool executions.
-            enable_caching: Whether to enable result caching.
-            cache_ttl: Default cache TTL in seconds.
-            enable_rate_limiting: Whether to enable rate limiting.
+                If None, uses unlimited concurrency. Default: None
+            enable_caching: Whether to enable result caching. Caches results
+                based on tool name and arguments. Default: True
+            cache_ttl: Default cache TTL in seconds. Results older than this
+                are evicted. Default: 300 (5 minutes)
+            enable_rate_limiting: Whether to enable rate limiting. Prevents
+                API abuse and quota exhaustion. Default: False
             global_rate_limit: Optional global rate limit (requests per minute).
+                Applies to all tools unless overridden. Default: None
             tool_rate_limits: Dict mapping tool names to (limit, period) tuples.
-            enable_retries: Whether to enable automatic retries.
-            max_retries: Maximum number of retry attempts.
-            parser_plugins: List of parser plugin names to use.
-                If None, uses all available parsers.
+                Example: {"api_call": (10, 60)} = 10 requests per 60 seconds
+            enable_retries: Whether to enable automatic retries on transient
+                failures. Uses exponential backoff. Default: True
+            max_retries: Maximum number of retry attempts. Total attempts will
+                be max_retries + 1 (initial + retries). Default: 3
+            retry_config: Optional custom retry configuration. If provided,
+                overrides max_retries. See RetryConfig for details.
+            enable_circuit_breaker: Whether to enable circuit breaker pattern.
+                Opens circuit after repeated failures to prevent cascading
+                failures. Default: False
+            circuit_breaker_threshold: Number of consecutive failures before
+                opening circuit. Default: 5
+            circuit_breaker_timeout: Seconds to wait before attempting recovery
+                (transition from OPEN to HALF_OPEN). Default: 60.0
+            parser_plugins: List of parser plugin names to use. If None, uses
+                all available parsers (XML, OpenAI, JSON). Default: None
+
+        Raises:
+            ImportError: If required dependencies are not installed.
+
+        Example:
+            >>> # Production configuration with all features
+            >>> processor = ToolProcessor(
+            ...     default_timeout=30.0,
+            ...     max_concurrency=20,
+            ...     enable_caching=True,
+            ...     cache_ttl=600,
+            ...     enable_rate_limiting=True,
+            ...     global_rate_limit=100,
+            ...     tool_rate_limits={
+            ...         "expensive_api": (10, 60),
+            ...         "free_api": (100, 60),
+            ...     },
+            ...     enable_retries=True,
+            ...     max_retries=3,
+            ...     enable_circuit_breaker=True,
+            ...     circuit_breaker_threshold=5,
+            ...     circuit_breaker_timeout=60.0,
+            ... )
         """
         self.logger = get_logger("chuk_tool_processor.processor")
 
@@ -80,13 +179,17 @@ class ToolProcessor:
         self.tool_rate_limits = tool_rate_limits
         self.enable_retries = enable_retries
         self.max_retries = max_retries
+        self.retry_config = retry_config
+        self.enable_circuit_breaker = enable_circuit_breaker
+        self.circuit_breaker_threshold = circuit_breaker_threshold
+        self.circuit_breaker_timeout = circuit_breaker_timeout
         self.parser_plugin_names = parser_plugins
 
-        # Placeholder for initialized components
-        self.registry = None
-        self.strategy = None
-        self.executor = None
-        self.parsers = []
+        # Placeholder for initialized components (typed as Optional for type safety)
+        self.registry: ToolRegistryInterface | None = None
+        self.strategy: Any | None = None  # Strategy type is complex, use Any for now
+        self.executor: Any | None = None  # Executor type is complex, use Any for now
+        self.parsers: list[Any] = []  # Parser types vary, use Any for now
 
         # Flag for tracking initialization state
         self._initialized = False
@@ -99,13 +202,8 @@ class ToolProcessor:
         This method ensures all components are properly initialized before use.
         It is called automatically by other methods if needed.
         """
-        # Fast path if already initialized
-        if self._initialized:
-            return
-
        # Ensure only one initialization happens at a time
        async with self._init_lock:
-            # Double-check pattern after acquiring lock
            if self._initialized:
                return
 
@@ -131,11 +229,25 @@ class ToolProcessor:
             executor = self.strategy
 
             # Apply wrappers in reverse order (innermost first)
+            # Circuit breaker goes innermost (closest to actual execution)
+            if self.enable_circuit_breaker:
+                self.logger.debug("Enabling circuit breaker")
+                circuit_config = CircuitBreakerConfig(
+                    failure_threshold=self.circuit_breaker_threshold,
+                    reset_timeout=self.circuit_breaker_timeout,
+                )
+                executor = CircuitBreakerExecutor(
+                    executor=executor,
+                    default_config=circuit_config,
+                )
+
             if self.enable_retries:
                 self.logger.debug("Enabling retry logic")
+                # Use custom retry config if provided, otherwise create default
+                retry_cfg = self.retry_config or RetryConfig(max_retries=self.max_retries)
                 executor = RetryableToolExecutor(
                     executor=executor,
-                    default_config=RetryConfig(max_retries=self.max_retries),
+                    default_config=retry_cfg,
                 )
 
             if self.enable_rate_limiting:
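For orientation, here is a minimal standalone sketch of the layering this hunk builds when both features are enabled. The two wrapper constructors and their keyword arguments are taken from the hunk above; base_executor is a hypothetical stand-in for the strategy object, and the rate-limiting and caching wrappers that follow in initialize() are not shown.

from chuk_tool_processor.execution.wrappers.circuit_breaker import (
    CircuitBreakerConfig,
    CircuitBreakerExecutor,
)
from chuk_tool_processor.execution.wrappers.retry import RetryableToolExecutor, RetryConfig

def wrap_executor(base_executor):
    # Circuit breaker wraps the strategy first (innermost), mirroring initialize().
    executor = CircuitBreakerExecutor(
        executor=base_executor,
        default_config=CircuitBreakerConfig(failure_threshold=5, reset_timeout=60.0),
    )
    # Retry wraps the circuit breaker, so every attempt passes through the breaker.
    executor = RetryableToolExecutor(
        executor=executor,
        default_config=RetryConfig(max_retries=3),
    )
    return executor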
@@ -188,21 +300,87 @@ class ToolProcessor:
         request_id: str | None = None,
     ) -> list[ToolResult]:
         """
-        Process tool calls from various input formats.
-
-        This method handles different input types:
-        - String: Parses tool calls from text using registered parsers
-        - Dict: Processes an OpenAI-style tool_calls object
-        - List[Dict]: Processes a list of individual tool calls
+        Process tool calls from various LLM output formats.
+
+        This method handles different input types from various LLM providers:
+
+        **String Input (Anthropic Claude style)**:
+            Parses tool calls from XML-like text using registered parsers.
+
+            Example:
+                >>> llm_output = '<tool name="calculator" args=\'{"a": 5, "b": 3}\'/>'
+                >>> results = await processor.process(llm_output)
+
+        **Dict Input (OpenAI style)**:
+            Processes an OpenAI-style tool_calls object.
+
+            Example:
+                >>> openai_output = {
+                ...     "tool_calls": [
+                ...         {
+                ...             "type": "function",
+                ...             "function": {
+                ...                 "name": "calculator",
+                ...                 "arguments": '{"a": 5, "b": 3}'
+                ...             }
+                ...         }
+                ...     ]
+                ... }
+                >>> results = await processor.process(openai_output)
+
+        **List[Dict] Input (Direct tool calls)**:
+            Processes a list of individual tool call dictionaries.
+
+            Example:
+                >>> direct_calls = [
+                ...     {"tool": "calculator", "arguments": {"a": 5, "b": 3}},
+                ...     {"tool": "weather", "arguments": {"city": "London"}}
+                ... ]
+                >>> results = await processor.process(direct_calls)
 
         Args:
-            data: Input data containing tool calls
-            timeout: Optional timeout for execution
-            use_cache: Whether to use cached results
-            request_id: Optional request ID for logging
+            data: Input data containing tool calls. Can be:
+                - String: XML/text format (e.g., Anthropic Claude)
+                - Dict: OpenAI tool_calls format
+                - List[Dict]: Direct tool call list
+            timeout: Optional timeout in seconds for tool execution.
+                Overrides default_timeout if provided. Default: None
+            use_cache: Whether to use cached results. If False, forces
+                fresh execution even if cached results exist. Default: True
+            request_id: Optional request ID for tracing and logging.
+                If not provided, a UUID will be generated. Default: None
 
         Returns:
-            List of tool results
+            List of ToolResult objects. Each result contains:
+            - tool: Name of the tool that was executed
+            - result: The tool's output (None if error)
+            - error: Error message if execution failed (None if success)
+            - duration: Execution time in seconds
+            - cached: Whether result was retrieved from cache
+
+            **Always returns a list** (never None), even if empty.
+
+        Raises:
+            ToolNotFoundError: If a tool is not registered in the registry
+            ToolTimeoutError: If tool execution exceeds timeout
+            ToolCircuitOpenError: If circuit breaker is open for a tool
+            ToolRateLimitedError: If rate limit is exceeded
+
+        Example:
+            >>> async with ToolProcessor() as processor:
+            ...     # Process Claude-style XML
+            ...     results = await processor.process(
+            ...         '<tool name="echo" args=\'{"message": "hello"}\'/>'
+            ...     )
+            ...
+            ...     # Check results
+            ...     for result in results:
+            ...         if result.error:
+            ...             print(f"Error: {result.error}")
+            ...         else:
+            ...             print(f"Success: {result.result}")
+            ...             print(f"Duration: {result.duration}s")
+            ...             print(f"From cache: {result.cached}")
         """
         # Ensure initialization
         await self.initialize()
@@ -231,7 +409,11 @@ class ToolProcessor:
                         args = {"raw": args_str}
 
                     if name:
-                        calls.append(ToolCall(tool=name, arguments=args, id=tc.get("id")))
+                        # Build ToolCall kwargs, only include id if present
+                        call_kwargs: dict[str, Any] = {"tool": name, "arguments": args}
+                        if "id" in tc and tc["id"]:
+                            call_kwargs["id"] = tc["id"]
+                        calls.append(ToolCall(**call_kwargs))
             else:
                 # Assume it's a single tool call
                 calls = [ToolCall(**data)]
@@ -239,7 +421,9 @@ class ToolProcessor:
             # List of tool calls
             calls = [ToolCall(**tc) for tc in data]
         else:
-            self.logger.warning(f"Unsupported input type: {type(data)}")
+            # Defensive: handle unexpected types at runtime
+            # This shouldn't happen per type signature, but helps with debugging
+            self.logger.warning(f"Unsupported input type: {type(data)}")  # type: ignore[unreachable]
             return []
 
         if not calls:
@@ -250,6 +434,10 @@ class ToolProcessor:
 
         # Execute tool calls
         async with log_context_span("tool_execution", {"num_calls": len(calls)}):
+            # Assert that initialization completed successfully
+            assert self.registry is not None, "Registry must be initialized"
+            assert self.executor is not None, "Executor must be initialized"
+
             # Check if any tools are unknown - search across all namespaces
             unknown_tools = []
             all_tools = await self.registry.list_tools()  # Returns list of (namespace, name) tuples
@@ -263,7 +451,7 @@ class ToolProcessor:
                 self.logger.debug(f"Unknown tools: {unknown_tools}")
 
             # Execute tools
-            results = await self.executor.execute(calls, timeout=timeout)
+            results: list[ToolResult] = await self.executor.execute(calls, timeout=timeout)
 
             # Log metrics for each tool call
             for call, result in zip(calls, results, strict=False):
@@ -319,22 +507,66 @@ class ToolProcessor:
         """
         Execute a list of ToolCall objects directly.
 
+        This is a lower-level method for executing tool calls when you already
+        have parsed ToolCall objects. For most use cases, prefer process()
+        which handles parsing automatically.
+
         Args:
-            calls: List of tool calls to execute
-            timeout: Optional execution timeout
-            use_cache: Whether to use cached results
+            calls: List of ToolCall objects to execute. Each call must have:
+                - tool: Name of the tool to execute
+                - arguments: Dictionary of arguments for the tool
+            timeout: Optional timeout in seconds for tool execution.
+                Overrides default_timeout if provided. Default: None
+            use_cache: Whether to use cached results. If False, forces
+                fresh execution even if cached results exist. Default: True
 
         Returns:
-            List of tool results
+            List of ToolResult objects, one per input ToolCall.
+            **Always returns a list** (never None), even if empty.
+
+            Each result contains:
+            - tool: Name of the tool that was executed
+            - result: The tool's output (None if error)
+            - error: Error message if execution failed (None if success)
+            - duration: Execution time in seconds
+            - cached: Whether result was retrieved from cache
+
+        Raises:
+            RuntimeError: If processor is not initialized
+            ToolNotFoundError: If a tool is not registered
+            ToolTimeoutError: If tool execution exceeds timeout
+            ToolCircuitOpenError: If circuit breaker is open
+            ToolRateLimitedError: If rate limit is exceeded
+
+        Example:
+            >>> from chuk_tool_processor import ToolCall
+            >>>
+            >>> # Create tool calls directly
+            >>> calls = [
+            ...     ToolCall(tool="calculator", arguments={"a": 5, "b": 3}),
+            ...     ToolCall(tool="weather", arguments={"city": "London"}),
+            ... ]
+            >>>
+            >>> async with ToolProcessor() as processor:
+            ...     results = await processor.execute(calls)
+            ...     for result in results:
+            ...         print(f"{result.tool}: {result.result}")
         """
         # Ensure initialization
         await self.initialize()
 
+        # Safety check: ensure we have an executor
+        if self.executor is None:
+            raise RuntimeError("Executor not initialized. Call initialize() first.")
+
         # Execute with the configured executor
-        return await self.executor.execute(
+        results = await self.executor.execute(
             calls=calls, timeout=timeout, use_cache=use_cache if hasattr(self.executor, "use_cache") else True
         )
 
+        # Ensure we always return a list (never None)
+        return results if results is not None else []
+
 
     async def _extract_tool_calls(self, text: str) -> list[ToolCall]:
         """
@@ -362,7 +594,8 @@ class ToolProcessor:
         for result in parser_results:
             if isinstance(result, Exception):
                 continue
-            if result:
+            # At this point, result is list[ToolCall], not an exception
+            if result and isinstance(result, list):
                 all_calls.extend(result)
 
         # ------------------------------------------------------------------ #
@@ -381,7 +614,7 @@ class ToolProcessor:
 
         return list(unique_calls.values())
 
-    async def _try_parser(self, parser, text: str) -> list[ToolCall]:
+    async def _try_parser(self, parser: Any, text: str) -> list[ToolCall]:
         """Try a single parser with metrics and logging."""
         parser_name = parser.__class__.__name__
 
@@ -390,7 +623,7 @@ class ToolProcessor:
 
         try:
             # Try to parse
-            calls = await parser.try_parse(text)
+            calls: list[ToolCall] = await parser.try_parse(text)
 
             # Log success
             duration = time.time() - start_time
@@ -415,6 +648,121 @@ class ToolProcessor:
             self.logger.debug(f"Parser {parser_name} failed: {str(e)}")
             return []
 
+    # ------------------------------------------------------------------ #
+    # Tool discovery and introspection                                    #
+    # ------------------------------------------------------------------ #
+    async def list_tools(self) -> list[str]:
+        """
+        List all registered tool names.
+
+        This method provides programmatic access to all tools in the registry.
+
+        Returns:
+            List of tool names (strings).
+
+        Example:
+            >>> async with ToolProcessor() as processor:
+            ...     tools = await processor.list_tools()
+            ...     for name in tools:
+            ...         print(f"Available tool: {name}")
+
+        Raises:
+            RuntimeError: If processor is not initialized. Call initialize()
+                or use the processor in a context manager.
+        """
+        await self.initialize()
+
+        if self.registry is None:
+            raise RuntimeError("Registry not initialized")
+
+        # Get tool tuples and extract names
+        tool_tuples = await self.registry.list_tools()
+        return [name for _, name in tool_tuples]
+
+    async def get_tool_count(self) -> int:
+        """
+        Get the number of registered tools.
+
+        Returns:
+            Number of registered tools.
+
+        Example:
+            >>> async with ToolProcessor() as processor:
+            ...     count = await processor.get_tool_count()
+            ...     print(f"Total tools: {count}")
+        """
+        await self.initialize()
+
+        if self.registry is None:
+            raise RuntimeError("Registry not initialized")
+
+        tool_tuples = await self.registry.list_tools()
+        return len(tool_tuples)
+
+    # ------------------------------------------------------------------ #
+    # Context manager support for automatic cleanup                       #
+    # ------------------------------------------------------------------ #
+    async def __aenter__(self):
+        """Context manager entry - ensures initialization."""
+        await self.initialize()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit with automatic cleanup."""
+        await self.close()
+        return False
+
+    async def close(self) -> None:
+        """
+        Close the processor and clean up resources.
+
+        This method ensures proper cleanup of executor resources, caches,
+        and any other stateful components.
+        """
+        self.logger.debug("Closing tool processor")
+
+        try:
+            # Close the executor if it has a close method
+            if self.executor and hasattr(self.executor, "close"):
+                close_method = self.executor.close
+                if asyncio.iscoroutinefunction(close_method):
+                    await close_method()
+                elif callable(close_method):
+                    close_method()
+
+            # Close the strategy if it has a close method
+            if self.strategy and hasattr(self.strategy, "close"):
+                close_method = self.strategy.close
+                if asyncio.iscoroutinefunction(close_method):
+                    await close_method()
+                elif callable(close_method):
+                    result = close_method()
+                    # Check if the result is a coroutine and await it
+                    if asyncio.iscoroutine(result):
+                        await result
+
+            # Clear cached results if using caching
+            if self.enable_caching and self.executor:
+                # Walk the executor chain to find the CachingToolExecutor
+                current = self.executor
+                while current:
+                    if isinstance(current, CachingToolExecutor):
+                        if hasattr(current.cache, "clear"):
+                            clear_method = current.cache.clear
+                            if asyncio.iscoroutinefunction(clear_method):
+                                await clear_method()
+                            else:
+                                clear_result = clear_method()
+                                if asyncio.iscoroutine(clear_result):
+                                    await clear_result
+                        break
+                    current = getattr(current, "executor", None)
+
+            self.logger.debug("Tool processor closed successfully")
+
+        except Exception as e:
+            self.logger.error(f"Error during processor cleanup: {e}")
+
 
 # Create a global processor instance
 _global_processor: ToolProcessor | None = None
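The Raises sections above introduce several new exception types (ToolNotFoundError, ToolTimeoutError, ToolCircuitOpenError, ToolRateLimitedError). A minimal sketch of handling two of them around process(); the import path chuk_tool_processor.core.exceptions is an assumption based on the core/exceptions.py entry in the file list, not something this diff shows directly.

from chuk_tool_processor import ToolProcessor
# Assumed import location; the diff only shows that core/exceptions.py grew substantially.
from chuk_tool_processor.core.exceptions import ToolCircuitOpenError, ToolRateLimitedError

async def run(llm_output: str) -> list:
    async with ToolProcessor(enable_circuit_breaker=True) as processor:
        try:
            return await processor.process(llm_output)
        except ToolCircuitOpenError:
            # The breaker is open for a tool; back off instead of retrying immediately.
            return []
        except ToolRateLimitedError:
            # Rate limit exceeded; the caller can queue the request and retry later.
            return []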
chuk_tool_processor/execution/wrappers/__init__.py (new file)
@@ -0,0 +1,42 @@
+# chuk_tool_processor/execution/wrappers/__init__.py
+"""Execution wrappers for adding production features to tool execution."""
+
+from chuk_tool_processor.execution.wrappers.caching import (
+    CacheInterface,
+    CachingToolExecutor,
+    InMemoryCache,
+    cacheable,
+)
+from chuk_tool_processor.execution.wrappers.circuit_breaker import (
+    CircuitBreakerConfig,
+    CircuitBreakerExecutor,
+    CircuitState,
+)
+from chuk_tool_processor.execution.wrappers.rate_limiting import (
+    RateLimitedToolExecutor,
+    RateLimiter,
+)
+from chuk_tool_processor.execution.wrappers.retry import (
+    RetryableToolExecutor,
+    RetryConfig,
+    retryable,
+)
+
+__all__ = [
+    # Caching
+    "CacheInterface",
+    "CachingToolExecutor",
+    "InMemoryCache",
+    "cacheable",
+    # Circuit breaker
+    "CircuitBreakerConfig",
+    "CircuitBreakerExecutor",
+    "CircuitState",
+    # Rate limiting
+    "RateLimitedToolExecutor",
+    "RateLimiter",
+    # Retry
+    "RetryableToolExecutor",
+    "RetryConfig",
+    "retryable",
+]
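Because this new package __init__ re-exports the wrapper types, configuration objects can now be imported from one place. A small sketch of passing a custom retry policy through the new retry_config parameter; only the max_retries field of RetryConfig appears elsewhere in this diff, so no other fields are assumed.

from chuk_tool_processor import ToolProcessor
from chuk_tool_processor.execution.wrappers import RetryConfig

# retry_config overrides max_retries when both are given (see the __init__ docstring above).
processor = ToolProcessor(
    enable_retries=True,
    retry_config=RetryConfig(max_retries=5),
)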