chuk-tool-processor 0.7.0__py3-none-any.whl → 0.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -1 +1,32 @@
  # chuk_tool_processor/core/__init__.py
+ """Core functionality for the tool processor."""
+
+ from chuk_tool_processor.core.exceptions import (
+     ErrorCode,
+     MCPConnectionError,
+     MCPError,
+     MCPTimeoutError,
+     ParserError,
+     ToolCircuitOpenError,
+     ToolExecutionError,
+     ToolNotFoundError,
+     ToolProcessorError,
+     ToolRateLimitedError,
+     ToolTimeoutError,
+     ToolValidationError,
+ )
+
+ __all__ = [
+     "ErrorCode",
+     "ToolProcessorError",
+     "ToolNotFoundError",
+     "ToolExecutionError",
+     "ToolTimeoutError",
+     "ToolValidationError",
+     "ParserError",
+     "ToolRateLimitedError",
+     "ToolCircuitOpenError",
+     "MCPError",
+     "MCPConnectionError",
+     "MCPTimeoutError",
+ ]
@@ -1,51 +1,257 @@
  # chuk_tool_processor/exceptions.py
+ from enum import Enum
  from typing import Any


+ class ErrorCode(str, Enum):
+     """Machine-readable error codes for tool processor errors."""
+
+     # Tool registry errors
+     TOOL_NOT_FOUND = "TOOL_NOT_FOUND"
+     TOOL_REGISTRATION_FAILED = "TOOL_REGISTRATION_FAILED"
+
+     # Execution errors
+     TOOL_EXECUTION_FAILED = "TOOL_EXECUTION_FAILED"
+     TOOL_TIMEOUT = "TOOL_TIMEOUT"
+     TOOL_CANCELLED = "TOOL_CANCELLED"
+
+     # Validation errors
+     TOOL_VALIDATION_ERROR = "TOOL_VALIDATION_ERROR"
+     TOOL_ARGUMENT_ERROR = "TOOL_ARGUMENT_ERROR"
+     TOOL_RESULT_ERROR = "TOOL_RESULT_ERROR"
+
+     # Rate limiting and circuit breaker
+     TOOL_RATE_LIMITED = "TOOL_RATE_LIMITED"
+     TOOL_CIRCUIT_OPEN = "TOOL_CIRCUIT_OPEN"
+
+     # Parser errors
+     PARSER_ERROR = "PARSER_ERROR"
+     PARSER_INVALID_FORMAT = "PARSER_INVALID_FORMAT"
+
+     # MCP errors
+     MCP_CONNECTION_FAILED = "MCP_CONNECTION_FAILED"
+     MCP_TRANSPORT_ERROR = "MCP_TRANSPORT_ERROR"
+     MCP_SERVER_ERROR = "MCP_SERVER_ERROR"
+     MCP_TIMEOUT = "MCP_TIMEOUT"
+
+     # System errors
+     RESOURCE_EXHAUSTED = "RESOURCE_EXHAUSTED"
+     CONFIGURATION_ERROR = "CONFIGURATION_ERROR"
+
+
  class ToolProcessorError(Exception):
-     """Base exception for all tool processor errors."""
+     """Base exception for all tool processor errors with machine-readable codes."""

-     pass
+     def __init__(
+         self,
+         message: str,
+         code: ErrorCode | None = None,
+         details: dict[str, Any] | None = None,
+         original_error: Exception | None = None,
+     ):
+         super().__init__(message)
+         self.code = code or ErrorCode.TOOL_EXECUTION_FAILED
+         self.details = details or {}
+         self.original_error = original_error
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert exception to a structured dictionary for logging/monitoring."""
+         result = {
+             "error": self.__class__.__name__,
+             "code": self.code.value,
+             "message": str(self),
+             "details": self.details,
+         }
+         if self.original_error:
+             result["original_error"] = {
+                 "type": type(self.original_error).__name__,
+                 "message": str(self.original_error),
+             }
+         return result


  class ToolNotFoundError(ToolProcessorError):
      """Raised when a requested tool is not found in the registry."""

-     def __init__(self, tool_name: str):
+     def __init__(self, tool_name: str, available_tools: list[str] | None = None):
          self.tool_name = tool_name
-         super().__init__(f"Tool '{tool_name}' not found in registry")
+         details: dict[str, Any] = {"tool_name": tool_name}
+         if available_tools:
+             details["available_tools"] = available_tools
+         super().__init__(
+             f"Tool '{tool_name}' not found in registry",
+             code=ErrorCode.TOOL_NOT_FOUND,
+             details=details,
+         )


  class ToolExecutionError(ToolProcessorError):
      """Raised when a tool execution fails."""

-     def __init__(self, tool_name: str, original_error: Exception | None = None):
+     def __init__(
+         self,
+         tool_name: str,
+         original_error: Exception | None = None,
+         details: dict[str, Any] | None = None,
+     ):
          self.tool_name = tool_name
-         self.original_error = original_error
          message = f"Tool '{tool_name}' execution failed"
          if original_error:
              message += f": {str(original_error)}"
-         super().__init__(message)
+
+         error_details = {"tool_name": tool_name}
+         if details:
+             error_details.update(details)
+
+         super().__init__(
+             message,
+             code=ErrorCode.TOOL_EXECUTION_FAILED,
+             details=error_details,
+             original_error=original_error,
+         )


  class ToolTimeoutError(ToolExecutionError):
      """Raised when a tool execution times out."""

-     def __init__(self, tool_name: str, timeout: float):
+     def __init__(self, tool_name: str, timeout: float, attempts: int = 1):
          self.timeout = timeout
-         super().__init__(tool_name, Exception(f"Execution timed out after {timeout}s"))
+         self.attempts = attempts
+         # Call ToolProcessorError.__init__ directly to set the right code
+         ToolProcessorError.__init__(
+             self,
+             f"Tool '{tool_name}' timed out after {timeout}s (attempts: {attempts})",
+             code=ErrorCode.TOOL_TIMEOUT,
+             details={"tool_name": tool_name, "timeout": timeout, "attempts": attempts},
+         )
+         self.tool_name = tool_name


  class ToolValidationError(ToolProcessorError):
      """Raised when tool arguments or results fail validation."""

-     def __init__(self, tool_name: str, errors: dict[str, Any]):
+     def __init__(
+         self,
+         tool_name: str,
+         errors: dict[str, Any],
+         validation_type: str = "arguments",
+     ):
          self.tool_name = tool_name
          self.errors = errors
-         super().__init__(f"Validation failed for tool '{tool_name}': {errors}")
+         self.validation_type = validation_type
+         super().__init__(
+             f"Validation failed for tool '{tool_name}' {validation_type}: {errors}",
+             code=ErrorCode.TOOL_VALIDATION_ERROR,
+             details={"tool_name": tool_name, "validation_type": validation_type, "errors": errors},
+         )


  class ParserError(ToolProcessorError):
      """Raised when parsing tool calls from raw input fails."""

-     pass
+     def __init__(
+         self,
+         message: str,
+         parser_name: str | None = None,
+         input_sample: str | None = None,
+     ):
+         self.parser_name = parser_name
+         self.input_sample = input_sample
+         details = {}
+         if parser_name:
+             details["parser_name"] = parser_name
+         if input_sample:
+             # Truncate sample for logging
+             details["input_sample"] = input_sample[:200] + "..." if len(input_sample) > 200 else input_sample
+         super().__init__(
+             message,
+             code=ErrorCode.PARSER_ERROR,
+             details=details,
+         )
+
+
+ class ToolRateLimitedError(ToolProcessorError):
+     """Raised when a tool call is rate limited."""
+
+     def __init__(
+         self,
+         tool_name: str,
+         retry_after: float | None = None,
+         limit: int | None = None,
+     ):
+         self.tool_name = tool_name
+         self.retry_after = retry_after
+         self.limit = limit
+         message = f"Tool '{tool_name}' rate limited"
+         if retry_after:
+             message += f" (retry after {retry_after}s)"
+         super().__init__(
+             message,
+             code=ErrorCode.TOOL_RATE_LIMITED,
+             details={"tool_name": tool_name, "retry_after": retry_after, "limit": limit},
+         )
+
+
+ class ToolCircuitOpenError(ToolProcessorError):
+     """Raised when a tool circuit breaker is open."""
+
+     def __init__(
+         self,
+         tool_name: str,
+         failure_count: int,
+         reset_timeout: float | None = None,
+     ):
+         self.tool_name = tool_name
+         self.failure_count = failure_count
+         self.reset_timeout = reset_timeout
+         message = f"Tool '{tool_name}' circuit breaker is open (failures: {failure_count})"
+         if reset_timeout:
+             message += f" (reset in {reset_timeout}s)"
+         super().__init__(
+             message,
+             code=ErrorCode.TOOL_CIRCUIT_OPEN,
+             details={"tool_name": tool_name, "failure_count": failure_count, "reset_timeout": reset_timeout},
+         )
+
+
+ class MCPError(ToolProcessorError):
+     """Base class for MCP-related errors."""
+
+     def __init__(
+         self,
+         message: str,
+         code: ErrorCode,
+         server_name: str | None = None,
+         details: dict[str, Any] | None = None,
+     ):
+         error_details = details or {}
+         if server_name:
+             error_details["server_name"] = server_name
+         super().__init__(message, code=code, details=error_details)
+
+
+ class MCPConnectionError(MCPError):
+     """Raised when MCP connection fails."""
+
+     def __init__(self, server_name: str, reason: str | None = None):
+         message = f"Failed to connect to MCP server '{server_name}'"
+         if reason:
+             message += f": {reason}"
+         super().__init__(
+             message,
+             code=ErrorCode.MCP_CONNECTION_FAILED,
+             server_name=server_name,
+             details={"reason": reason} if reason else None,
+         )
+
+
+ class MCPTimeoutError(MCPError):
+     """Raised when MCP operation times out."""
+
+     def __init__(self, server_name: str, operation: str, timeout: float):
+         super().__init__(
+             f"MCP operation '{operation}' on server '{server_name}' timed out after {timeout}s",
+             code=ErrorCode.MCP_TIMEOUT,
+             server_name=server_name,
+             details={"operation": operation, "timeout": timeout},
+         )
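
A minimal consumer-side sketch of the reworked exception hierarchy. The class names, constructor signatures, ErrorCode members, and to_dict() keys come from the diff above; the logging setup around them is illustrative only.

    import json
    import logging

    from chuk_tool_processor.core.exceptions import (
        ErrorCode,
        ToolNotFoundError,
        ToolProcessorError,
    )

    logger = logging.getLogger("tool_errors")

    def report(exc: ToolProcessorError) -> None:
        # to_dict() returns {"error", "code", "message", "details", ...} per the diff
        payload = exc.to_dict()
        if exc.code is ErrorCode.TOOL_NOT_FOUND:
            logger.warning(json.dumps(payload))
        else:
            logger.error(json.dumps(payload))

    try:
        raise ToolNotFoundError("search_web", available_tools=["calculator"])
    except ToolProcessorError as exc:
        report(exc)

Because every exception now carries a code and a details dict, callers can log structured payloads instead of parsing message strings.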
@@ -18,6 +18,10 @@ from typing import Any

  from chuk_tool_processor.execution.strategies.inprocess_strategy import InProcessStrategy
  from chuk_tool_processor.execution.wrappers.caching import CachingToolExecutor, InMemoryCache
+ from chuk_tool_processor.execution.wrappers.circuit_breaker import (
+     CircuitBreakerConfig,
+     CircuitBreakerExecutor,
+ )
  from chuk_tool_processor.execution.wrappers.rate_limiting import RateLimitedToolExecutor, RateLimiter
  from chuk_tool_processor.execution.wrappers.retry import RetryableToolExecutor, RetryConfig
  from chuk_tool_processor.logging import get_logger, log_context_span, log_tool_call, metrics, request_logging
@@ -46,6 +50,10 @@ class ToolProcessor:
          tool_rate_limits: dict[str, tuple] | None = None,
          enable_retries: bool = True,
          max_retries: int = 3,
+         retry_config: RetryConfig | None = None,
+         enable_circuit_breaker: bool = False,
+         circuit_breaker_threshold: int = 5,
+         circuit_breaker_timeout: float = 60.0,
          parser_plugins: list[str] | None = None,
      ):
          """
@@ -63,6 +71,9 @@ class ToolProcessor:
              tool_rate_limits: Dict mapping tool names to (limit, period) tuples.
              enable_retries: Whether to enable automatic retries.
              max_retries: Maximum number of retry attempts.
+             enable_circuit_breaker: Whether to enable circuit breaker pattern.
+             circuit_breaker_threshold: Number of failures before opening circuit.
+             circuit_breaker_timeout: Seconds to wait before testing recovery.
              parser_plugins: List of parser plugin names to use.
                  If None, uses all available parsers.
          """
@@ -80,6 +91,10 @@ class ToolProcessor:
          self.tool_rate_limits = tool_rate_limits
          self.enable_retries = enable_retries
          self.max_retries = max_retries
+         self.retry_config = retry_config
+         self.enable_circuit_breaker = enable_circuit_breaker
+         self.circuit_breaker_threshold = circuit_breaker_threshold
+         self.circuit_breaker_timeout = circuit_breaker_timeout
          self.parser_plugin_names = parser_plugins

          # Placeholder for initialized components
@@ -131,11 +146,25 @@ class ToolProcessor:
          executor = self.strategy

          # Apply wrappers in reverse order (innermost first)
+         # Circuit breaker goes innermost (closest to actual execution)
+         if self.enable_circuit_breaker:
+             self.logger.debug("Enabling circuit breaker")
+             circuit_config = CircuitBreakerConfig(
+                 failure_threshold=self.circuit_breaker_threshold,
+                 reset_timeout=self.circuit_breaker_timeout,
+             )
+             executor = CircuitBreakerExecutor(
+                 executor=executor,
+                 default_config=circuit_config,
+             )
+
          if self.enable_retries:
              self.logger.debug("Enabling retry logic")
+             # Use custom retry config if provided, otherwise create default
+             retry_cfg = self.retry_config or RetryConfig(max_retries=self.max_retries)
              executor = RetryableToolExecutor(
                  executor=executor,
-                 default_config=RetryConfig(max_retries=self.max_retries),
+                 default_config=retry_cfg,
              )

          if self.enable_rate_limiting:
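
For reference, a hedged sketch of the new constructor options wired above. The parameter names and defaults come from the diff; the import path for ToolProcessor is an assumption.

    from chuk_tool_processor.core.processor import ToolProcessor  # import path assumed
    from chuk_tool_processor.execution.wrappers.retry import RetryConfig

    processor = ToolProcessor(
        enable_retries=True,
        retry_config=RetryConfig(max_retries=5),  # overrides max_retries when provided
        enable_circuit_breaker=True,              # off by default
        circuit_breaker_threshold=5,              # failures before the circuit opens
        circuit_breaker_timeout=60.0,             # seconds before a recovery probe
    )

Per the wrapping order above, the circuit breaker sits closest to the strategy and the retry wrapper is applied outside it.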
@@ -0,0 +1,42 @@
+ # chuk_tool_processor/execution/wrappers/__init__.py
+ """Execution wrappers for adding production features to tool execution."""
+
+ from chuk_tool_processor.execution.wrappers.caching import (
+     CacheInterface,
+     CachingToolExecutor,
+     InMemoryCache,
+     cacheable,
+ )
+ from chuk_tool_processor.execution.wrappers.circuit_breaker import (
+     CircuitBreakerConfig,
+     CircuitBreakerExecutor,
+     CircuitState,
+ )
+ from chuk_tool_processor.execution.wrappers.rate_limiting import (
+     RateLimitedToolExecutor,
+     RateLimiter,
+ )
+ from chuk_tool_processor.execution.wrappers.retry import (
+     RetryableToolExecutor,
+     RetryConfig,
+     retryable,
+ )
+
+ __all__ = [
+     # Caching
+     "CacheInterface",
+     "CachingToolExecutor",
+     "InMemoryCache",
+     "cacheable",
+     # Circuit breaker
+     "CircuitBreakerConfig",
+     "CircuitBreakerExecutor",
+     "CircuitState",
+     # Rate limiting
+     "RateLimitedToolExecutor",
+     "RateLimiter",
+     # Retry
+     "RetryableToolExecutor",
+     "RetryConfig",
+     "retryable",
+ ]
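
The new package-level exports also allow composing the wrappers by hand. A sketch mirroring the order used by ToolProcessor above; the constructor keywords come from the diff, and base_strategy is assumed to be an already-built execution strategy.

    from chuk_tool_processor.execution.wrappers import (
        CircuitBreakerConfig,
        CircuitBreakerExecutor,
        RetryableToolExecutor,
        RetryConfig,
    )

    def build_executor(base_strategy):
        # Circuit breaker innermost, retries outside it, as in the ToolProcessor wiring
        guarded = CircuitBreakerExecutor(
            executor=base_strategy,
            default_config=CircuitBreakerConfig(failure_threshold=5, reset_timeout=60.0),
        )
        return RetryableToolExecutor(
            executor=guarded,
            default_config=RetryConfig(max_retries=3),
        )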
@@ -29,6 +29,24 @@ from chuk_tool_processor.models.tool_result import ToolResult

  logger = get_logger("chuk_tool_processor.execution.wrappers.caching")

+ # Optional observability imports
+ try:
+     from chuk_tool_processor.observability.metrics import get_metrics
+     from chuk_tool_processor.observability.tracing import trace_cache_operation
+
+     _observability_available = True
+ except ImportError:
+     _observability_available = False
+
+     # No-op functions when observability not available
+     def get_metrics():
+         return None
+
+     def trace_cache_operation(*_args, **_kwargs):
+         from contextlib import nullcontext
+
+         return nullcontext()
+

  # --------------------------------------------------------------------------- #
  # Cache primitives
@@ -428,8 +446,17 @@ class CachingToolExecutor:
                  uncached.append((idx, call))
                  continue

-             h = self._hash_arguments(call.arguments)
-             cached_val = await self.cache.get(call.tool, h)
+             # Use idempotency_key if available, otherwise hash arguments
+             cache_key = call.idempotency_key or self._hash_arguments(call.arguments)
+
+             # Trace cache lookup operation
+             with trace_cache_operation("lookup", call.tool):
+                 cached_val = await self.cache.get(call.tool, cache_key)
+
+             # Record metrics
+             metrics = get_metrics()
+             if metrics:
+                 metrics.record_cache_operation(call.tool, "lookup", hit=(cached_val is not None))

              if cached_val is None:
                  # Cache miss
@@ -480,19 +507,25 @@ class CachingToolExecutor:
          # ------------------------------------------------------------------
          if use_cache:
              cache_tasks = []
+             metrics = get_metrics()
+
              for (_idx, call), result in zip(uncached, uncached_results, strict=False):
                  if result.error is None and self._is_cacheable(call.tool):
                      ttl = self._ttl_for(call.tool)
                      logger.debug(f"Caching result for {call.tool} with TTL={ttl}s")

-                     # Create task but don't await yet (for concurrent caching)
-                     task = self.cache.set(
-                         call.tool,
-                         self._hash_arguments(call.arguments),
-                         result.result,
-                         ttl=ttl,
-                     )
-                     cache_tasks.append(task)
+                     # Use idempotency_key if available, otherwise hash arguments
+                     cache_key = call.idempotency_key or self._hash_arguments(call.arguments)
+
+                     # Trace and record cache set operation
+                     # Bind loop variables to avoid B023 error
+                     async def cache_with_trace(tool=call.tool, key=cache_key, value=result.result, ttl_val=ttl):
+                         with trace_cache_operation("set", tool, attributes={"ttl": ttl_val}):
+                             await self.cache.set(tool, key, value, ttl=ttl_val)
+                         if metrics:
+                             metrics.record_cache_operation(tool, "set")
+
+                     cache_tasks.append(cache_with_trace())

                      # Flag as non-cached so callers can tell
                      if hasattr(result, "cached"):
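
A sketch of the new cache-key behavior: when a call carries an idempotency_key, CachingToolExecutor uses it instead of the argument hash for both lookups and writes. The tool, arguments, and idempotency_key attribute names come from the diff; the ToolCall import path and keyword construction are assumptions.

    from chuk_tool_processor.models.tool_call import ToolCall  # import path assumed

    call_a = ToolCall(
        tool="fetch_report",
        arguments={"day": "2024-06-01"},
        idempotency_key="report-2024-06-01",
    )
    call_b = ToolCall(
        tool="fetch_report",
        arguments={"day": "2024-06-01", "force": True},
        idempotency_key="report-2024-06-01",
    )
    # Despite differing arguments, both calls resolve to the same cache entry,
    # because the executor prefers call.idempotency_key over the argument hash.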