chuk-tool-processor 0.4__tar.gz → 0.4.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of chuk-tool-processor might be problematic. Click here for more details.
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/PKG-INFO +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/pyproject.toml +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/core/processor.py +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/strategies/inprocess_strategy.py +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/wrappers/caching.py +3 -3
- chuk_tool_processor-0.4.1/src/chuk_tool_processor/execution/wrappers/retry.py +275 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/logging/context.py +6 -6
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/mcp_tool.py +48 -36
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/register_mcp_tools.py +3 -3
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/setup_mcp_sse.py +4 -4
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/setup_mcp_stdio.py +2 -2
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/stream_manager.py +6 -6
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/transport/base_transport.py +2 -2
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/transport/sse_transport.py +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/transport/stdio_transport.py +2 -2
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/validated_tool.py +6 -6
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/discovery.py +3 -3
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/parsers/base.py +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/parsers/xml_tool.py +2 -2
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/auto_register.py +5 -5
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/interface.py +2 -2
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/providers/memory.py +2 -2
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/utils/validation.py +1 -1
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor.egg-info/PKG-INFO +1 -1
- chuk_tool_processor-0.4/src/chuk_tool_processor/execution/wrappers/retry.py +0 -286
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/README.md +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/setup.cfg +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/core/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/core/exceptions.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/strategies/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/strategies/subprocess_strategy.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/tool_executor.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/wrappers/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/execution/wrappers/rate_limiting.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/logging/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/logging/formatter.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/logging/helpers.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/logging/metrics.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/transport/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/execution_strategy.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/streaming_tool.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/tool_call.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/tool_export_mixin.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/models/tool_result.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/parsers/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/parsers/function_call_tool.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/parsers/json_tool.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/plugins/parsers/openai_tool.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/decorators.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/metadata.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/provider.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/providers/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/registry/tool_export.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/utils/__init__.py +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor.egg-info/SOURCES.txt +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor.egg-info/dependency_links.txt +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor.egg-info/requires.txt +0 -0
- {chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor.egg-info/top_level.txt +0 -0
{chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/core/processor.py
RENAMED
|
@@ -367,7 +367,7 @@ class ToolProcessor:
|
|
|
367
367
|
all_calls.extend(result)
|
|
368
368
|
|
|
369
369
|
# ------------------------------------------------------------------ #
|
|
370
|
-
# Remove duplicates – use a stable digest instead of hashing a
|
|
370
|
+
# Remove duplicates - use a stable digest instead of hashing a
|
|
371
371
|
# frozenset of argument items (which breaks on unhashable types).
|
|
372
372
|
# ------------------------------------------------------------------ #
|
|
373
373
|
def _args_digest(args: Dict[str, Any]) -> str:
|
|
@@ -393,7 +393,7 @@ class InProcessStrategy(ExecutionStrategy):
|
|
|
393
393
|
"""
|
|
394
394
|
Execute a single tool call with guaranteed timeout.
|
|
395
395
|
|
|
396
|
-
The entire invocation – including argument validation – is wrapped
|
|
396
|
+
The entire invocation - including argument validation - is wrapped
|
|
397
397
|
by the semaphore to honour *max_concurrency*.
|
|
398
398
|
|
|
399
399
|
Args:
|
|
@@ -4,9 +4,9 @@ Async-native caching wrapper for tool execution.
|
|
|
4
4
|
|
|
5
5
|
This module provides:
|
|
6
6
|
|
|
7
|
-
* **CacheInterface** – abstract async cache contract for custom implementations
|
|
8
|
-
* **InMemoryCache** – simple, thread-safe in-memory cache with TTL support
|
|
9
|
-
* **CachingToolExecutor** – executor wrapper that transparently caches results
|
|
7
|
+
* **CacheInterface** - abstract async cache contract for custom implementations
|
|
8
|
+
* **InMemoryCache** - simple, thread-safe in-memory cache with TTL support
|
|
9
|
+
* **CachingToolExecutor** - executor wrapper that transparently caches results
|
|
10
10
|
|
|
11
11
|
Results retrieved from cache are marked with `cached=True` and `machine="cache"`
|
|
12
12
|
for easy detection.
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
# chuk_tool_processor/execution/wrappers/retry.py
|
|
2
|
+
"""
|
|
3
|
+
Async-native retry wrapper for tool execution.
|
|
4
|
+
|
|
5
|
+
Adds exponential–back-off retry logic and *deadline-aware* timeout handling so a
|
|
6
|
+
`timeout=` passed by callers is treated as the **total wall-clock budget** for
|
|
7
|
+
all attempts of a single tool call.
|
|
8
|
+
"""
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import asyncio
|
|
12
|
+
import random
|
|
13
|
+
import time
|
|
14
|
+
from datetime import datetime, timezone
|
|
15
|
+
from typing import Any, Dict, List, Optional, Type
|
|
16
|
+
|
|
17
|
+
from chuk_tool_processor.logging import get_logger
|
|
18
|
+
from chuk_tool_processor.models.tool_call import ToolCall
|
|
19
|
+
from chuk_tool_processor.models.tool_result import ToolResult
|
|
20
|
+
|
|
21
|
+
logger = get_logger("chuk_tool_processor.execution.wrappers.retry")
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# --------------------------------------------------------------------------- #
|
|
25
|
+
# Retry configuration
|
|
26
|
+
# --------------------------------------------------------------------------- #
|
|
27
|
+
class RetryConfig:
|
|
28
|
+
"""Configuration object that decides *whether* and *when* to retry."""
|
|
29
|
+
|
|
30
|
+
def __init__(
|
|
31
|
+
self,
|
|
32
|
+
max_retries: int = 3,
|
|
33
|
+
base_delay: float = 1.0,
|
|
34
|
+
max_delay: float = 60.0,
|
|
35
|
+
jitter: bool = True,
|
|
36
|
+
retry_on_exceptions: Optional[List[Type[Exception]]] = None,
|
|
37
|
+
retry_on_error_substrings: Optional[List[str]] = None,
|
|
38
|
+
):
|
|
39
|
+
if max_retries < 0:
|
|
40
|
+
raise ValueError("max_retries cannot be negative")
|
|
41
|
+
self.max_retries = max_retries
|
|
42
|
+
self.base_delay = base_delay
|
|
43
|
+
self.max_delay = max_delay
|
|
44
|
+
self.jitter = jitter
|
|
45
|
+
self.retry_on_exceptions = retry_on_exceptions or []
|
|
46
|
+
self.retry_on_error_substrings = retry_on_error_substrings or []
|
|
47
|
+
|
|
48
|
+
# --------------------------------------------------------------------- #
|
|
49
|
+
# Decision helpers
|
|
50
|
+
# --------------------------------------------------------------------- #
|
|
51
|
+
def should_retry( # noqa: D401 (imperative mood is fine)
|
|
52
|
+
self,
|
|
53
|
+
attempt: int,
|
|
54
|
+
*,
|
|
55
|
+
error: Optional[Exception] = None,
|
|
56
|
+
error_str: Optional[str] = None,
|
|
57
|
+
) -> bool:
|
|
58
|
+
"""Return *True* iff another retry is allowed for this attempt."""
|
|
59
|
+
if attempt >= self.max_retries:
|
|
60
|
+
return False
|
|
61
|
+
|
|
62
|
+
# Nothing specified → always retry until max_retries reached
|
|
63
|
+
if not self.retry_on_exceptions and not self.retry_on_error_substrings:
|
|
64
|
+
return True
|
|
65
|
+
|
|
66
|
+
if error is not None and any(isinstance(error, exc) for exc in self.retry_on_exceptions):
|
|
67
|
+
return True
|
|
68
|
+
|
|
69
|
+
if error_str and any(substr in error_str for substr in self.retry_on_error_substrings):
|
|
70
|
+
return True
|
|
71
|
+
|
|
72
|
+
return False
|
|
73
|
+
|
|
74
|
+
# --------------------------------------------------------------------- #
|
|
75
|
+
# Back-off
|
|
76
|
+
# --------------------------------------------------------------------- #
|
|
77
|
+
def get_delay(self, attempt: int) -> float:
|
|
78
|
+
"""Exponential back-off delay for *attempt* (0-based)."""
|
|
79
|
+
delay = min(self.base_delay * (2 ** attempt), self.max_delay)
|
|
80
|
+
if self.jitter:
|
|
81
|
+
delay *= 0.5 + random.random() # jitter in [0.5, 1.5)
|
|
82
|
+
return delay
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
# --------------------------------------------------------------------------- #
|
|
86
|
+
# Retryable executor
|
|
87
|
+
# --------------------------------------------------------------------------- #
|
|
88
|
+
class RetryableToolExecutor:
|
|
89
|
+
"""
|
|
90
|
+
Wraps another executor and re-invokes it according to a :class:`RetryConfig`.
|
|
91
|
+
"""
|
|
92
|
+
|
|
93
|
+
def __init__(
|
|
94
|
+
self,
|
|
95
|
+
executor: Any,
|
|
96
|
+
*,
|
|
97
|
+
default_config: Optional[RetryConfig] = None,
|
|
98
|
+
tool_configs: Optional[Dict[str, RetryConfig]] = None,
|
|
99
|
+
):
|
|
100
|
+
self.executor = executor
|
|
101
|
+
self.default_config = default_config or RetryConfig()
|
|
102
|
+
self.tool_configs = tool_configs or {}
|
|
103
|
+
|
|
104
|
+
# --------------------------------------------------------------------- #
|
|
105
|
+
# Public helpers
|
|
106
|
+
# --------------------------------------------------------------------- #
|
|
107
|
+
def _config_for(self, tool: str) -> RetryConfig:
|
|
108
|
+
return self.tool_configs.get(tool, self.default_config)
|
|
109
|
+
|
|
110
|
+
async def execute(
|
|
111
|
+
self,
|
|
112
|
+
calls: List[ToolCall],
|
|
113
|
+
*,
|
|
114
|
+
timeout: Optional[float] = None,
|
|
115
|
+
use_cache: bool = True,
|
|
116
|
+
) -> List[ToolResult]:
|
|
117
|
+
if not calls:
|
|
118
|
+
return []
|
|
119
|
+
|
|
120
|
+
out: List[ToolResult] = []
|
|
121
|
+
for call in calls:
|
|
122
|
+
cfg = self._config_for(call.tool)
|
|
123
|
+
out.append(await self._execute_single(call, cfg, timeout, use_cache))
|
|
124
|
+
return out
|
|
125
|
+
|
|
126
|
+
# --------------------------------------------------------------------- #
|
|
127
|
+
# Core retry loop (per call)
|
|
128
|
+
# --------------------------------------------------------------------- #
|
|
129
|
+
async def _execute_single(
|
|
130
|
+
self,
|
|
131
|
+
call: ToolCall,
|
|
132
|
+
cfg: RetryConfig,
|
|
133
|
+
timeout: Optional[float],
|
|
134
|
+
use_cache: bool,
|
|
135
|
+
) -> ToolResult:
|
|
136
|
+
attempt = 0
|
|
137
|
+
last_error: Optional[str] = None
|
|
138
|
+
pid = 0
|
|
139
|
+
machine = "unknown"
|
|
140
|
+
|
|
141
|
+
# ---------------------------------------------------------------- #
|
|
142
|
+
# Deadline budget (wall-clock)
|
|
143
|
+
# ---------------------------------------------------------------- #
|
|
144
|
+
deadline = None
|
|
145
|
+
if timeout is not None:
|
|
146
|
+
deadline = time.monotonic() + timeout
|
|
147
|
+
|
|
148
|
+
while True:
|
|
149
|
+
# ---------------------------------------------------------------- #
|
|
150
|
+
# Check whether we have any time left *before* trying the call
|
|
151
|
+
# ---------------------------------------------------------------- #
|
|
152
|
+
if deadline is not None:
|
|
153
|
+
remaining = deadline - time.monotonic()
|
|
154
|
+
if remaining <= 0:
|
|
155
|
+
return ToolResult(
|
|
156
|
+
tool=call.tool,
|
|
157
|
+
result=None,
|
|
158
|
+
error=f"Timeout after {timeout}s",
|
|
159
|
+
start_time=datetime.now(timezone.utc),
|
|
160
|
+
end_time=datetime.now(timezone.utc),
|
|
161
|
+
machine=machine,
|
|
162
|
+
pid=pid,
|
|
163
|
+
attempts=attempt,
|
|
164
|
+
)
|
|
165
|
+
else:
|
|
166
|
+
remaining = None # unlimited
|
|
167
|
+
|
|
168
|
+
# ---------------------------------------------------------------- #
|
|
169
|
+
# Execute one attempt
|
|
170
|
+
# ---------------------------------------------------------------- #
|
|
171
|
+
start_time = datetime.now(timezone.utc)
|
|
172
|
+
try:
|
|
173
|
+
kwargs = {"timeout": remaining} if remaining is not None else {}
|
|
174
|
+
if hasattr(self.executor, "use_cache"):
|
|
175
|
+
kwargs["use_cache"] = use_cache
|
|
176
|
+
|
|
177
|
+
result = (await self.executor.execute([call], **kwargs))[0]
|
|
178
|
+
pid = result.pid
|
|
179
|
+
machine = result.machine
|
|
180
|
+
|
|
181
|
+
# Success?
|
|
182
|
+
if not result.error:
|
|
183
|
+
result.attempts = attempt + 1
|
|
184
|
+
return result
|
|
185
|
+
|
|
186
|
+
# Error: decide on retry
|
|
187
|
+
last_error = result.error
|
|
188
|
+
if cfg.should_retry(attempt, error_str=result.error):
|
|
189
|
+
delay = cfg.get_delay(attempt)
|
|
190
|
+
# never overshoot the deadline
|
|
191
|
+
if deadline is not None:
|
|
192
|
+
delay = min(delay, max(deadline - time.monotonic(), 0))
|
|
193
|
+
if delay:
|
|
194
|
+
await asyncio.sleep(delay)
|
|
195
|
+
attempt += 1
|
|
196
|
+
continue
|
|
197
|
+
|
|
198
|
+
# No more retries wanted
|
|
199
|
+
result.error = self._wrap_error(last_error, attempt, cfg)
|
|
200
|
+
result.attempts = attempt + 1
|
|
201
|
+
return result
|
|
202
|
+
|
|
203
|
+
# ---------------------------------------------------------------- #
|
|
204
|
+
# Exception path
|
|
205
|
+
# ---------------------------------------------------------------- #
|
|
206
|
+
except Exception as exc: # noqa: BLE001
|
|
207
|
+
err_str = str(exc)
|
|
208
|
+
last_error = err_str
|
|
209
|
+
if cfg.should_retry(attempt, error=exc):
|
|
210
|
+
delay = cfg.get_delay(attempt)
|
|
211
|
+
if deadline is not None:
|
|
212
|
+
delay = min(delay, max(deadline - time.monotonic(), 0))
|
|
213
|
+
if delay:
|
|
214
|
+
await asyncio.sleep(delay)
|
|
215
|
+
attempt += 1
|
|
216
|
+
continue
|
|
217
|
+
|
|
218
|
+
end_time = datetime.now(timezone.utc)
|
|
219
|
+
return ToolResult(
|
|
220
|
+
tool=call.tool,
|
|
221
|
+
result=None,
|
|
222
|
+
error=self._wrap_error(err_str, attempt, cfg),
|
|
223
|
+
start_time=start_time,
|
|
224
|
+
end_time=end_time,
|
|
225
|
+
machine=machine,
|
|
226
|
+
pid=pid,
|
|
227
|
+
attempts=attempt + 1,
|
|
228
|
+
)
|
|
229
|
+
|
|
230
|
+
# --------------------------------------------------------------------- #
|
|
231
|
+
# Helpers
|
|
232
|
+
# --------------------------------------------------------------------- #
|
|
233
|
+
@staticmethod
|
|
234
|
+
def _wrap_error(err: str, attempt: int, cfg: RetryConfig) -> str:
|
|
235
|
+
if attempt >= cfg.max_retries and attempt > 0:
|
|
236
|
+
return f"Max retries reached ({cfg.max_retries}): {err}"
|
|
237
|
+
return err
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
# --------------------------------------------------------------------------- #
|
|
241
|
+
# Decorator helper
|
|
242
|
+
# --------------------------------------------------------------------------- #
|
|
243
|
+
def retryable(
|
|
244
|
+
*,
|
|
245
|
+
max_retries: int = 3,
|
|
246
|
+
base_delay: float = 1.0,
|
|
247
|
+
max_delay: float = 60.0,
|
|
248
|
+
jitter: bool = True,
|
|
249
|
+
retry_on_exceptions: Optional[List[Type[Exception]]] = None,
|
|
250
|
+
retry_on_error_substrings: Optional[List[str]] = None,
|
|
251
|
+
):
|
|
252
|
+
"""
|
|
253
|
+
Class decorator that attaches a :class:`RetryConfig` to a *tool* class.
|
|
254
|
+
|
|
255
|
+
Example
|
|
256
|
+
-------
|
|
257
|
+
```python
|
|
258
|
+
@retryable(max_retries=5, base_delay=0.5)
|
|
259
|
+
class MyTool:
|
|
260
|
+
...
|
|
261
|
+
```
|
|
262
|
+
"""
|
|
263
|
+
|
|
264
|
+
def _decorator(cls):
|
|
265
|
+
cls._retry_config = RetryConfig(
|
|
266
|
+
max_retries=max_retries,
|
|
267
|
+
base_delay=base_delay,
|
|
268
|
+
max_delay=max_delay,
|
|
269
|
+
jitter=jitter,
|
|
270
|
+
retry_on_exceptions=retry_on_exceptions,
|
|
271
|
+
retry_on_error_substrings=retry_on_error_substrings,
|
|
272
|
+
)
|
|
273
|
+
return cls
|
|
274
|
+
|
|
275
|
+
return _decorator
|
{chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/logging/context.py
RENAMED
|
@@ -4,12 +4,12 @@ Async-safe context management for structured logging.
|
|
|
4
4
|
|
|
5
5
|
This module provides:
|
|
6
6
|
|
|
7
|
-
* **LogContext** – an `asyncio`-aware container that keeps a per-task dict of
|
|
7
|
+
* **LogContext** - an `asyncio`-aware container that keeps a per-task dict of
|
|
8
8
|
contextual data (request IDs, span IDs, arbitrary metadata, …).
|
|
9
|
-
* **log_context** – a global instance of `LogContext` for convenience.
|
|
10
|
-
* **StructuredAdapter** – a `logging.LoggerAdapter` that injects the current
|
|
9
|
+
* **log_context** - a global instance of `LogContext` for convenience.
|
|
10
|
+
* **StructuredAdapter** - a `logging.LoggerAdapter` that injects the current
|
|
11
11
|
`log_context.context` into every log record.
|
|
12
|
-
* **get_logger** – helper that returns a configured `StructuredAdapter`.
|
|
12
|
+
* **get_logger** - helper that returns a configured `StructuredAdapter`.
|
|
13
13
|
"""
|
|
14
14
|
|
|
15
15
|
from __future__ import annotations
|
|
@@ -75,7 +75,7 @@ class LogContext:
|
|
|
75
75
|
Async-safe context container.
|
|
76
76
|
|
|
77
77
|
Holds a mutable dict that is *local* to the current asyncio task, so
|
|
78
|
-
concurrent coroutines don’t interfere with each other.
|
|
78
|
+
concurrent coroutines don't interfere with each other.
|
|
79
79
|
"""
|
|
80
80
|
|
|
81
81
|
# ------------------------------------------------------------------ #
|
|
@@ -196,7 +196,7 @@ class StructuredAdapter(logging.LoggerAdapter):
|
|
|
196
196
|
"""
|
|
197
197
|
|
|
198
198
|
# --------------------------- core hook -------------------------------- #
|
|
199
|
-
def process(self, msg, kwargs): # noqa: D401
|
|
199
|
+
def process(self, msg, kwargs): # noqa: D401 - keep signature from base
|
|
200
200
|
kwargs = kwargs or {}
|
|
201
201
|
extra = kwargs.get("extra", {}).copy()
|
|
202
202
|
ctx = log_context.context
|
{chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/mcp_tool.py
RENAMED
|
@@ -36,11 +36,11 @@ class MCPTool:
|
|
|
36
36
|
servers: Optional[List[str]] = None,
|
|
37
37
|
server_names: Optional[Dict[int, str]] = None,
|
|
38
38
|
namespace: str = "stdio",
|
|
39
|
-
default_timeout: Optional[float] = None
|
|
39
|
+
default_timeout: Optional[float] = None
|
|
40
40
|
) -> None:
|
|
41
41
|
self.tool_name = tool_name
|
|
42
42
|
self._sm: Optional[StreamManager] = stream_manager
|
|
43
|
-
self.default_timeout = default_timeout
|
|
43
|
+
self.default_timeout = default_timeout
|
|
44
44
|
|
|
45
45
|
# Boot-strap parameters (only needed if _sm is None)
|
|
46
46
|
self._cfg_file = cfg_file
|
|
@@ -79,55 +79,67 @@ class MCPTool:
|
|
|
79
79
|
|
|
80
80
|
return self._sm # type: ignore[return-value]
|
|
81
81
|
|
|
82
|
-
# ------------------------------------------------------------------ #
|
|
83
82
|
async def execute(self, timeout: Optional[float] = None, **kwargs: Any) -> Any:
|
|
84
83
|
"""
|
|
85
|
-
|
|
84
|
+
Invoke the remote MCP tool, guaranteeing that *one* timeout is enforced.
|
|
86
85
|
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
86
|
+
Parameters
|
|
87
|
+
----------
|
|
88
|
+
timeout : float | None
|
|
89
|
+
If provided, forward this to StreamManager. Otherwise fall back
|
|
90
|
+
to ``self.default_timeout``.
|
|
91
|
+
**kwargs
|
|
92
|
+
Arguments forwarded to the tool.
|
|
91
93
|
|
|
92
|
-
Returns
|
|
93
|
-
|
|
94
|
+
Returns
|
|
95
|
+
-------
|
|
96
|
+
Any
|
|
97
|
+
The ``content`` of the remote tool response.
|
|
94
98
|
|
|
95
99
|
Raises
|
|
96
100
|
------
|
|
97
101
|
RuntimeError
|
|
98
|
-
|
|
102
|
+
The remote tool returned an error payload.
|
|
99
103
|
asyncio.TimeoutError
|
|
100
|
-
|
|
104
|
+
The call exceeded the chosen timeout.
|
|
101
105
|
"""
|
|
102
106
|
sm = await self._ensure_stream_manager()
|
|
103
|
-
|
|
104
|
-
#
|
|
105
|
-
effective_timeout =
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
107
|
+
|
|
108
|
+
# Pick the timeout we will enforce (may be None = no limit).
|
|
109
|
+
effective_timeout: Optional[float] = (
|
|
110
|
+
timeout if timeout is not None else self.default_timeout
|
|
111
|
+
)
|
|
112
|
+
|
|
113
|
+
call_kwargs: dict[str, Any] = {
|
|
114
|
+
"tool_name": self.tool_name,
|
|
115
|
+
"arguments": kwargs,
|
|
116
|
+
}
|
|
117
|
+
if effective_timeout is not None:
|
|
118
|
+
call_kwargs["timeout"] = effective_timeout
|
|
119
|
+
logger.debug(
|
|
120
|
+
"Forwarding timeout=%ss to StreamManager for tool '%s'",
|
|
121
|
+
effective_timeout,
|
|
122
|
+
self.tool_name,
|
|
115
123
|
)
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
logger.error("Remote MCP error from '%s': %s", self.tool_name, err)
|
|
120
|
-
raise RuntimeError(err)
|
|
121
|
-
|
|
122
|
-
return result.get("content")
|
|
123
|
-
|
|
124
|
+
|
|
125
|
+
try:
|
|
126
|
+
result = await sm.call_tool(**call_kwargs)
|
|
124
127
|
except asyncio.TimeoutError:
|
|
125
|
-
logger.warning(
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
128
|
+
logger.warning(
|
|
129
|
+
"MCP tool '%s' timed out after %ss",
|
|
130
|
+
self.tool_name,
|
|
131
|
+
effective_timeout,
|
|
132
|
+
)
|
|
129
133
|
raise
|
|
130
134
|
|
|
135
|
+
if result.get("isError"):
|
|
136
|
+
err = result.get("error", "Unknown error")
|
|
137
|
+
logger.error("Remote MCP error from '%s': %s", self.tool_name, err)
|
|
138
|
+
raise RuntimeError(err)
|
|
139
|
+
|
|
140
|
+
return result.get("content")
|
|
141
|
+
|
|
142
|
+
|
|
131
143
|
# ------------------------------------------------------------------ #
|
|
132
144
|
# Legacy method name support
|
|
133
145
|
async def _aexecute(self, timeout: Optional[float] = None, **kwargs: Any) -> Any:
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
Discover the remote MCP tools exposed by a :class:`~chuk_tool_processor.mcp.stream_manager.StreamManager`
|
|
5
5
|
instance and register them in the local CHUK registry.
|
|
6
6
|
|
|
7
|
-
The helper is now **async-native** – call it with ``await``.
|
|
7
|
+
The helper is now **async-native** - call it with ``await``.
|
|
8
8
|
"""
|
|
9
9
|
|
|
10
10
|
from __future__ import annotations
|
|
@@ -55,7 +55,7 @@ async def register_mcp_tools(
|
|
|
55
55
|
for tool_def in mcp_tools:
|
|
56
56
|
tool_name = tool_def.get("name")
|
|
57
57
|
if not tool_name:
|
|
58
|
-
logger.warning("Remote tool definition without a 'name' field – skipped")
|
|
58
|
+
logger.warning("Remote tool definition without a 'name' field - skipped")
|
|
59
59
|
continue
|
|
60
60
|
|
|
61
61
|
description = tool_def.get("description") or f"MCP tool • {tool_name}"
|
|
@@ -96,5 +96,5 @@ async def register_mcp_tools(
|
|
|
96
96
|
except Exception as exc: # noqa: BLE001
|
|
97
97
|
logger.error("Failed to register MCP tool '%s': %s", tool_name, exc)
|
|
98
98
|
|
|
99
|
-
logger.info("MCP registration complete – %d tool(s) available", len(registered))
|
|
99
|
+
logger.info("MCP registration complete - %d tool(s) available", len(registered))
|
|
100
100
|
return registered
|
{chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/setup_mcp_sse.py
RENAMED
|
@@ -8,7 +8,7 @@ Utility that wires up:
|
|
|
8
8
|
2. The remote MCP tools exposed by that manager (via
|
|
9
9
|
:pyfunc:`~chuk_tool_processor.mcp.register_mcp_tools.register_mcp_tools`).
|
|
10
10
|
3. A fully-featured :class:`~chuk_tool_processor.core.processor.ToolProcessor`
|
|
11
|
-
instance that can execute those tools – with optional caching,
|
|
11
|
+
instance that can execute those tools - with optional caching,
|
|
12
12
|
rate-limiting, retries, etc.
|
|
13
13
|
"""
|
|
14
14
|
|
|
@@ -28,7 +28,7 @@ logger = get_logger("chuk_tool_processor.mcp.setup_sse")
|
|
|
28
28
|
# --------------------------------------------------------------------------- #
|
|
29
29
|
# public helper
|
|
30
30
|
# --------------------------------------------------------------------------- #
|
|
31
|
-
async def setup_mcp_sse(  # noqa: C901 – long, but just a config wrapper
|
|
31
|
+
async def setup_mcp_sse( # noqa: C901 - long, but just a config wrapper
|
|
32
32
|
*,
|
|
33
33
|
servers: List[Dict[str, str]],
|
|
34
34
|
server_names: Optional[Dict[int, str]] = None,
|
|
@@ -47,7 +47,7 @@ async def setup_mcp_sse( # noqa: C901 – long, but just a config wrapper
|
|
|
47
47
|
Spin up an SSE-backed *StreamManager*, register all its remote tools,
|
|
48
48
|
and return a ready-to-go :class:`ToolProcessor`.
|
|
49
49
|
|
|
50
|
-
Everything is **async-native** – call with ``await``.
|
|
50
|
+
Everything is **async-native** - call with ``await``.
|
|
51
51
|
|
|
52
52
|
NEW: Automatically detects and adds bearer token from MCP_BEARER_TOKEN
|
|
53
53
|
environment variable if not explicitly provided in server config.
|
|
@@ -91,7 +91,7 @@ async def setup_mcp_sse( # noqa: C901 – long, but just a config wrapper
|
|
|
91
91
|
)
|
|
92
92
|
|
|
93
93
|
logger.info(
|
|
94
|
-
"MCP (SSE) initialised – %s tool%s registered into namespace '%s'",
|
|
94
|
+
"MCP (SSE) initialised - %s tool%s registered into namespace '%s'",
|
|
95
95
|
len(registered),
|
|
96
96
|
"" if len(registered) == 1 else "s",
|
|
97
97
|
namespace,
|
{chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/setup_mcp_stdio.py
RENAMED
|
@@ -26,7 +26,7 @@ logger = get_logger("chuk_tool_processor.mcp.setup_stdio")
|
|
|
26
26
|
# --------------------------------------------------------------------------- #
|
|
27
27
|
# public helper
|
|
28
28
|
# --------------------------------------------------------------------------- #
|
|
29
|
-
async def setup_mcp_stdio(  # noqa: C901 – long but just a config facade
|
|
29
|
+
async def setup_mcp_stdio( # noqa: C901 - long but just a config facade
|
|
30
30
|
*,
|
|
31
31
|
config_file: str,
|
|
32
32
|
servers: List[str],
|
|
@@ -72,7 +72,7 @@ async def setup_mcp_stdio( # noqa: C901 – long but just a config facade
|
|
|
72
72
|
)
|
|
73
73
|
|
|
74
74
|
logger.info(
|
|
75
|
-
"MCP (stdio) initialised – %s tool%s registered into namespace '%s'",
|
|
75
|
+
"MCP (stdio) initialised - %s tool%s registered into namespace '%s'",
|
|
76
76
|
len(registered),
|
|
77
77
|
"" if len(registered) == 1 else "s",
|
|
78
78
|
namespace,
|
{chuk_tool_processor-0.4 → chuk_tool_processor-0.4.1}/src/chuk_tool_processor/mcp/stream_manager.py
RENAMED
|
@@ -77,7 +77,7 @@ class StreamManager:
|
|
|
77
77
|
return inst
|
|
78
78
|
|
|
79
79
|
# ------------------------------------------------------------------ #
|
|
80
|
-
# initialisation – stdio / sse #
|
|
80
|
+
# initialisation - stdio / sse #
|
|
81
81
|
# ------------------------------------------------------------------ #
|
|
82
82
|
async def initialize(
|
|
83
83
|
self,
|
|
@@ -143,12 +143,12 @@ class StreamManager:
|
|
|
143
143
|
"status": status,
|
|
144
144
|
}
|
|
145
145
|
)
|
|
146
|
-
logger.info("Initialised %s – %d tool(s)", server_name, len(tools))
|
|
146
|
+
logger.info("Initialised %s - %d tool(s)", server_name, len(tools))
|
|
147
147
|
except Exception as exc: # noqa: BLE001
|
|
148
148
|
logger.error("Error initialising %s: %s", server_name, exc)
|
|
149
149
|
|
|
150
150
|
logger.info(
|
|
151
|
-
"StreamManager ready – %d server(s), %d tool(s)",
|
|
151
|
+
"StreamManager ready - %d server(s), %d tool(s)",
|
|
152
152
|
len(self.transports),
|
|
153
153
|
len(self.all_tools),
|
|
154
154
|
)
|
|
@@ -194,12 +194,12 @@ class StreamManager:
|
|
|
194
194
|
self.server_info.append(
|
|
195
195
|
{"id": idx, "name": name, "tools": len(tools), "status": status}
|
|
196
196
|
)
|
|
197
|
-
logger.info("Initialised SSE %s – %d tool(s)", name, len(tools))
|
|
197
|
+
logger.info("Initialised SSE %s - %d tool(s)", name, len(tools))
|
|
198
198
|
except Exception as exc: # noqa: BLE001
|
|
199
199
|
logger.error("Error initialising SSE %s: %s", name, exc)
|
|
200
200
|
|
|
201
201
|
logger.info(
|
|
202
|
-
"StreamManager ready – %d SSE server(s), %d tool(s)",
|
|
202
|
+
"StreamManager ready - %d SSE server(s), %d tool(s)",
|
|
203
203
|
len(self.transports),
|
|
204
204
|
len(self.all_tools),
|
|
205
205
|
)
|
|
@@ -245,7 +245,7 @@ class StreamManager:
|
|
|
245
245
|
return []
|
|
246
246
|
|
|
247
247
|
# ------------------------------------------------------------------ #
|
|
248
|
-
# EXTRA HELPERS – ping / resources / prompts #
|
|
248
|
+
# EXTRA HELPERS - ping / resources / prompts #
|
|
249
249
|
# ------------------------------------------------------------------ #
|
|
250
250
|
async def ping_servers(self) -> List[Dict[str, Any]]:
|
|
251
251
|
async def _ping_one(name: str, tr: MCPBaseTransport):
|
|
@@ -73,7 +73,7 @@ class MCPBaseTransport(ABC):
|
|
|
73
73
|
@abstractmethod
|
|
74
74
|
async def list_resources(self) -> Dict[str, Any]:
|
|
75
75
|
"""
|
|
76
|
-
Retrieve the server’s resources catalogue.
|
|
76
|
+
Retrieve the server's resources catalogue.
|
|
77
77
|
|
|
78
78
|
Expected shape::
|
|
79
79
|
{ "resources": [ {...}, ... ], "nextCursor": "…", … }
|
|
@@ -83,7 +83,7 @@ class MCPBaseTransport(ABC):
|
|
|
83
83
|
@abstractmethod
|
|
84
84
|
async def list_prompts(self) -> Dict[str, Any]:
|
|
85
85
|
"""
|
|
86
|
-
Retrieve the server’s prompt catalogue.
|
|
86
|
+
Retrieve the server's prompt catalogue.
|
|
87
87
|
|
|
88
88
|
Expected shape::
|
|
89
89
|
{ "prompts": [ {...}, ... ], "nextCursor": "…", … }
|
|
@@ -351,7 +351,7 @@ class SSETransport(MCPBaseTransport):
|
|
|
351
351
|
"""
|
|
352
352
|
# NEW: Ensure initialization before tool calls
|
|
353
353
|
if not self._initialized.is_set():
|
|
354
|
-
return {"isError": True, "error": "
|
|
354
|
+
return {"isError": True, "error": "SSE transport not implemented"}
|
|
355
355
|
|
|
356
356
|
if not self._message_url:
|
|
357
357
|
return {"isError": True, "error": "No message endpoint available"}
|
|
@@ -131,7 +131,7 @@ class StdioTransport(MCPBaseTransport):
|
|
|
131
131
|
def get_streams(self):
|
|
132
132
|
"""
|
|
133
133
|
Expose the low-level streams so legacy callers can access them
|
|
134
|
-
directly. The base-class’ default returns an empty list; here we
|
|
134
|
+
directly. The base-class' default returns an empty list; here we
|
|
135
135
|
return a single-element list when the transport is active.
|
|
136
136
|
"""
|
|
137
137
|
if self.read_stream and self.write_stream:
|
|
@@ -145,7 +145,7 @@ class StdioTransport(MCPBaseTransport):
|
|
|
145
145
|
self, tool_name: str, arguments: Dict[str, Any]
|
|
146
146
|
) -> Dict[str, Any]:
|
|
147
147
|
"""
|
|
148
|
-
Execute *tool_name* with *arguments* and normalise the server’s reply.
|
|
148
|
+
Execute *tool_name* with *arguments* and normalise the server's reply.
|
|
149
149
|
|
|
150
150
|
The echo-server often returns:
|
|
151
151
|
{
|