chuk-tool-processor 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of chuk-tool-processor has been flagged as possibly problematic; see the registry page for details.
- chuk_tool_processor/__init__.py +1 -0
- chuk_tool_processor/core/__init__.py +1 -0
- chuk_tool_processor/core/exceptions.py +45 -0
- chuk_tool_processor/core/processor.py +268 -0
- chuk_tool_processor/execution/__init__.py +0 -0
- chuk_tool_processor/execution/strategies/__init__.py +0 -0
- chuk_tool_processor/execution/strategies/inprocess_strategy.py +206 -0
- chuk_tool_processor/execution/strategies/subprocess_strategy.py +103 -0
- chuk_tool_processor/execution/tool_executor.py +46 -0
- chuk_tool_processor/execution/wrappers/__init__.py +0 -0
- chuk_tool_processor/execution/wrappers/caching.py +234 -0
- chuk_tool_processor/execution/wrappers/rate_limiting.py +149 -0
- chuk_tool_processor/execution/wrappers/retry.py +176 -0
- chuk_tool_processor/models/__init__.py +1 -0
- chuk_tool_processor/models/execution_strategy.py +19 -0
- chuk_tool_processor/models/tool_call.py +7 -0
- chuk_tool_processor/models/tool_result.py +49 -0
- chuk_tool_processor/plugins/__init__.py +1 -0
- chuk_tool_processor/plugins/discovery.py +205 -0
- chuk_tool_processor/plugins/parsers/__init__.py +1 -0
- chuk_tool_processor/plugins/parsers/function_call_tool.py +105 -0
- chuk_tool_processor/plugins/parsers/json_tool.py +17 -0
- chuk_tool_processor/plugins/parsers/xml_tool.py +41 -0
- chuk_tool_processor/registry/__init__.py +20 -0
- chuk_tool_processor/registry/decorators.py +42 -0
- chuk_tool_processor/registry/interface.py +79 -0
- chuk_tool_processor/registry/metadata.py +36 -0
- chuk_tool_processor/registry/provider.py +44 -0
- chuk_tool_processor/registry/providers/__init__.py +41 -0
- chuk_tool_processor/registry/providers/memory.py +165 -0
- chuk_tool_processor/utils/__init__.py +0 -0
- chuk_tool_processor/utils/logging.py +260 -0
- chuk_tool_processor/utils/validation.py +192 -0
- chuk_tool_processor-0.1.0.dist-info/METADATA +293 -0
- chuk_tool_processor-0.1.0.dist-info/RECORD +37 -0
- chuk_tool_processor-0.1.0.dist-info/WHEEL +5 -0
- chuk_tool_processor-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,234 @@
+# chuk_tool_processor/execution/wrappers/caching.py
+import asyncio
+import hashlib
+import json
+import time
+from abc import ABC, abstractmethod
+from datetime import datetime, timedelta
+from functools import wraps
+from typing import Any, Dict, Optional, Tuple, List, Callable
+from pydantic import BaseModel
+
+# imports
+from chuk_tool_processor.models.tool_call import ToolCall
+from chuk_tool_processor.models.tool_result import ToolResult
+
+
+class CacheEntry(BaseModel):
+    """
+    Entry in the tool result cache.
+    """
+    tool: str
+    arguments_hash: str
+    result: Any
+    created_at: datetime
+    expires_at: Optional[datetime] = None
+
+
+class CacheInterface(ABC):
+    """
+    Abstract interface for cache implementations.
+    """
+    @abstractmethod
+    async def get(self, tool: str, arguments_hash: str) -> Optional[Any]:
+        """
+        Get a cached result for a tool with given arguments hash.
+        """
+        pass
+
+    @abstractmethod
+    async def set(
+        self,
+        tool: str,
+        arguments_hash: str,
+        result: Any,
+        ttl: Optional[int] = None
+    ) -> None:
+        """
+        Set a cached result for a tool with given arguments hash.
+        """
+        pass
+
+    @abstractmethod
+    async def invalidate(self, tool: str, arguments_hash: Optional[str] = None) -> None:
+        """
+        Invalidate cached results for a tool, optionally for specific arguments.
+        """
+        pass
+
+
+class InMemoryCache(CacheInterface):
+    """
+    In-memory implementation of the cache interface.
+    """
+    def __init__(self, default_ttl: Optional[int] = 300):
+        self._cache: Dict[str, Dict[str, CacheEntry]] = {}
+        self._default_ttl = default_ttl
+        self._lock = asyncio.Lock()
+
+    async def get(self, tool: str, arguments_hash: str) -> Optional[Any]:
+        async with self._lock:
+            tool_cache = self._cache.get(tool)
+            if not tool_cache:
+                return None
+            entry = tool_cache.get(arguments_hash)
+            if not entry:
+                return None
+            now = datetime.now()
+            if entry.expires_at and entry.expires_at < now:
+                del tool_cache[arguments_hash]
+                return None
+            return entry.result
+
+    async def set(
+        self,
+        tool: str,
+        arguments_hash: str,
+        result: Any,
+        ttl: Optional[int] = None
+    ) -> None:
+        async with self._lock:
+            if tool not in self._cache:
+                self._cache[tool] = {}
+            now = datetime.now()
+            expires_at = None
+            actual_ttl = ttl if ttl is not None else self._default_ttl
+            if actual_ttl is not None:
+                expires_at = now + timedelta(seconds=actual_ttl)
+            entry = CacheEntry(
+                tool=tool,
+                arguments_hash=arguments_hash,
+                result=result,
+                created_at=now,
+                expires_at=expires_at
+            )
+            self._cache[tool][arguments_hash] = entry
+
+    async def invalidate(self, tool: str, arguments_hash: Optional[str] = None) -> None:
+        async with self._lock:
+            if tool not in self._cache:
+                return
+            if arguments_hash is not None:
+                self._cache[tool].pop(arguments_hash, None)
+            else:
+                del self._cache[tool]
+
+
+class CachingToolExecutor:
+    """
+    Wrapper for a tool executor that caches results.
+    """
+    def __init__(
+        self,
+        executor: Any,
+        cache: CacheInterface,
+        default_ttl: Optional[int] = None,
+        tool_ttls: Optional[Dict[str, int]] = None,
+        cacheable_tools: Optional[List[str]] = None
+    ):
+        self.executor = executor
+        self.cache = cache
+        self.default_ttl = default_ttl
+        self.tool_ttls = tool_ttls or {}
+        self.cacheable_tools = cacheable_tools
+
+    def _get_arguments_hash(self, arguments: Dict[str, Any]) -> str:
+        serialized = json.dumps(arguments, sort_keys=True)
+        return hashlib.md5(serialized.encode()).hexdigest()
+
+    def _is_cacheable(self, tool: str) -> bool:
+        if self.cacheable_tools is None:
+            return True
+        return tool in self.cacheable_tools
+
+    def _get_ttl(self, tool: str) -> Optional[int]:
+        return self.tool_ttls.get(tool, self.default_ttl)
+
+    async def execute(
+        self,
+        calls: List[ToolCall],
+        timeout: Optional[float] = None,
+        use_cache: bool = True
+    ) -> List[ToolResult]:
+        results: List[ToolResult] = []
+        uncached_calls: List[Tuple[int, ToolCall]] = []
+
+        if use_cache:
+            for i, call in enumerate(calls):
+                if not self._is_cacheable(call.tool):
+                    uncached_calls.append((i, call))
+                    continue
+                arguments_hash = self._get_arguments_hash(call.arguments)
+                cached_result = await self.cache.get(call.tool, arguments_hash)
+                if cached_result is not None:
+                    now = datetime.now()
+                    results.append(ToolResult(
+                        tool=call.tool,
+                        result=cached_result,
+                        error=None,
+                        start_time=now,
+                        end_time=now,
+                        machine="cache",
+                        pid=0,
+                        cached=True
+                    ))
+                else:
+                    uncached_calls.append((i, call))
+        else:
+            uncached_calls = [(i, call) for i, call in enumerate(calls)]
+
+        # Early return if all served from cache
+        if use_cache and not uncached_calls:
+            return results
+
+        if uncached_calls:
+            uncached_results = await self.executor.execute(
+                [call for _, call in uncached_calls],
+                timeout=timeout
+            )
+
+            if use_cache:
+                for idx, result in enumerate(uncached_results):
+                    _, call = uncached_calls[idx]
+                    if result.error is None and self._is_cacheable(call.tool):
+                        arguments_hash = self._get_arguments_hash(call.arguments)
+                        ttl = self._get_ttl(call.tool)
+                        await self.cache.set(
+                            call.tool,
+                            arguments_hash,
+                            result.result,
+                            ttl=ttl
+                        )
+                    result.cached = False
+
+        final_results: List[ToolResult] = [None] * len(calls)
+        uncached_indices = {idx for idx, _ in uncached_calls}
+        uncached_iter = iter(uncached_results)
+        cache_iter = iter(results)
+        for i in range(len(calls)):
+            if i in uncached_indices:
+                final_results[i] = next(uncached_iter)
+            else:
+                final_results[i] = next(cache_iter)
+        return final_results
+
+
+def cacheable(ttl: Optional[int] = None):
+    def decorator(cls):
+        cls._cacheable = True
+        if ttl is not None:
+            cls._cache_ttl = ttl
+        return cls
+    return decorator
+
+
+def invalidate_cache(tool: str, arguments: Optional[Dict[str, Any]] = None):
+    async def _invalidate(cache: CacheInterface):
+        if arguments is not None:
+            arguments_hash = hashlib.md5(
+                json.dumps(arguments, sort_keys=True).encode()
+            ).hexdigest()
+            await cache.invalidate(tool, arguments_hash)
+        else:
+            await cache.invalidate(tool)
+    return _invalidate
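
For context on how the caching wrapper above composes, here is a hedged usage sketch; EchoExecutor is a hypothetical inner executor invented for the example, while InMemoryCache, CachingToolExecutor, ToolCall, and ToolResult come from the package code shown in this diff.

import asyncio

from chuk_tool_processor.execution.wrappers.caching import CachingToolExecutor, InMemoryCache
from chuk_tool_processor.models.tool_call import ToolCall
from chuk_tool_processor.models.tool_result import ToolResult


class EchoExecutor:
    # Hypothetical inner executor: echoes the call arguments back as the result.
    async def execute(self, calls, timeout=None):
        return [ToolResult(tool=c.tool, result=c.arguments) for c in calls]


async def main():
    executor = CachingToolExecutor(
        executor=EchoExecutor(),
        cache=InMemoryCache(default_ttl=300),  # entries expire after 5 minutes
        tool_ttls={"search": 60},              # per-tool TTL override
    )
    calls = [ToolCall(tool="search", arguments={"q": "python"})]
    first = await executor.execute(calls)   # runs the tool, then stores the result
    second = await executor.execute(calls)  # identical arguments: served from cache
    print(first[0].cached, second[0].cached)  # False True


asyncio.run(main())
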
@@ -0,0 +1,149 @@
+# chuk_tool_processor/execution/wrappers/rate_limiting.py
+import asyncio
+import time
+from datetime import datetime
+from typing import Dict, Optional, List, Any, Tuple
+
+# imports
+from chuk_tool_processor.models.tool_call import ToolCall
+from chuk_tool_processor.models.tool_result import ToolResult
+from chuk_tool_processor.core.exceptions import ToolExecutionError
+
+
+class RateLimiter:
+    """
+    Rate limiter for tool executions.
+    Supports per-tool rate limits and global rate limits.
+    """
+    def __init__(
+        self,
+        global_limit: Optional[int] = None,
+        global_period: float = 60.0,
+        tool_limits: Optional[Dict[str, Tuple[int, float]]] = None
+    ):
+        """
+        Initialize the rate limiter.
+        """
+        self.global_limit = global_limit
+        self.global_period = global_period
+        self.tool_limits = tool_limits or {}
+
+        # Track request timestamps
+        self._global_timestamps: List[float] = []
+        self._tool_timestamps: Dict[str, List[float]] = {}
+
+        # Locks for concurrency safety
+        self._global_lock = asyncio.Lock()
+        self._tool_locks: Dict[str, asyncio.Lock] = {}
+
+    async def _wait_for_global_limit(self) -> None:
+        """
+        Wait until global rate limit allows another request.
+        """
+        if self.global_limit is None:
+            return
+
+        while True:
+            # Acquire lock to check and possibly record
+            async with self._global_lock:
+                now = time.time()
+                # Remove expired timestamps
+                cutoff = now - self.global_period
+                self._global_timestamps = [ts for ts in self._global_timestamps if ts > cutoff]
+                # If under limit, record and proceed
+                if len(self._global_timestamps) < self.global_limit:
+                    self._global_timestamps.append(now)
+                    return
+                # Otherwise compute wait time
+                oldest = min(self._global_timestamps)
+                wait_time = (oldest + self.global_period) - now
+            # Sleep outside lock
+            if wait_time > 0:
+                await asyncio.sleep(wait_time)
+            else:
+                # retry immediately
+                continue
+
+    async def _wait_for_tool_limit(self, tool: str) -> None:
+        """
+        Wait until tool-specific rate limit allows another request.
+        """
+        # Check if tool has a limit
+        if tool not in self.tool_limits:
+            return
+        limit, period = self.tool_limits[tool]
+
+        # Initialize lock and timestamps list if needed
+        if tool not in self._tool_locks:
+            self._tool_locks[tool] = asyncio.Lock()
+        if tool not in self._tool_timestamps:
+            self._tool_timestamps[tool] = []
+
+        while True:
+            async with self._tool_locks[tool]:
+                now = time.time()
+                # Remove expired timestamps
+                cutoff = now - period
+                self._tool_timestamps[tool] = [ts for ts in self._tool_timestamps[tool] if ts > cutoff]
+                # If under limit, record and proceed
+                if len(self._tool_timestamps[tool]) < limit:
+                    self._tool_timestamps[tool].append(now)
+                    return
+                # Otherwise compute wait time
+                oldest = min(self._tool_timestamps[tool])
+                wait_time = (oldest + period) - now
+            # Sleep outside lock
+            if wait_time > 0:
+                await asyncio.sleep(wait_time)
+            else:
+                continue
+
+    async def wait(self, tool: str) -> None:
+        """
+        Wait until rate limits allow execution of the given tool.
+        """
+        # Wait for global limit first
+        await self._wait_for_global_limit()
+        # Then wait for tool-specific limit
+        await self._wait_for_tool_limit(tool)
+
+
+class RateLimitedToolExecutor:
+    """
+    Wrapper for a tool executor that applies rate limiting.
+    """
+    def __init__(
+        self,
+        executor: Any,
+        rate_limiter: RateLimiter
+    ):
+        """
+        Initialize the rate-limited executor.
+        """
+        self.executor = executor
+        self.rate_limiter = rate_limiter
+
+    async def execute(
+        self,
+        calls: List[ToolCall],
+        timeout: Optional[float] = None
+    ) -> List[ToolResult]:
+        """
+        Execute tool calls with rate limiting.
+        """
+        # Apply rate limiting to each call
+        for call in calls:
+            await self.rate_limiter.wait(call.tool)
+        # Delegate to inner executor
+        return await self.executor.execute(calls, timeout=timeout)
+
+
+def rate_limited(limit: int, period: float = 60.0):
+    """
+    Decorator to specify rate limits for a tool class.
+    """
+    def decorator(cls):
+        cls._rate_limit = limit
+        cls._rate_period = period
+        return cls
+    return decorator
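
A comparable hedged sketch for the rate-limiting wrapper above; the specific limits and the SlowExecutor stand-in are illustrative assumptions, not values taken from the package.

import asyncio

from chuk_tool_processor.execution.wrappers.rate_limiting import RateLimiter, RateLimitedToolExecutor
from chuk_tool_processor.models.tool_call import ToolCall
from chuk_tool_processor.models.tool_result import ToolResult


class SlowExecutor:
    # Hypothetical inner executor used only for this sketch.
    async def execute(self, calls, timeout=None):
        return [ToolResult(tool=c.tool, result="ok") for c in calls]


async def main():
    limiter = RateLimiter(
        global_limit=10, global_period=60.0,  # at most 10 calls per minute overall
        tool_limits={"weather": (2, 1.0)},    # and at most 2 "weather" calls per second
    )
    executor = RateLimitedToolExecutor(SlowExecutor(), limiter)
    calls = [ToolCall(tool="weather", arguments={"city": "Paris"}) for _ in range(3)]
    # The third wait() blocks until the 1-second window frees a slot,
    # then the whole batch is delegated to the inner executor.
    results = await executor.execute(calls)
    print([r.result for r in results])


asyncio.run(main())
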
@@ -0,0 +1,176 @@
+# chuk_tool_processor/retry.py
+import asyncio
+import logging
+import random
+from datetime import datetime, timezone
+from typing import Any, Dict, List, Optional, Type
+
+# imports
+from chuk_tool_processor.models.tool_call import ToolCall
+from chuk_tool_processor.models.tool_result import ToolResult
+
+logger = logging.getLogger(__name__)
+
+
+class RetryConfig:
+    """
+    Configuration for retry behavior.
+    """
+    def __init__(
+        self,
+        max_retries: int = 3,
+        base_delay: float = 1.0,
+        max_delay: float = 60.0,
+        jitter: bool = True,
+        retry_on_exceptions: Optional[List[Type[Exception]]] = None,
+        retry_on_error_substrings: Optional[List[str]] = None
+    ):
+        self.max_retries = max_retries
+        self.base_delay = base_delay
+        self.max_delay = max_delay
+        self.jitter = jitter
+        self.retry_on_exceptions = retry_on_exceptions or []
+        self.retry_on_error_substrings = retry_on_error_substrings or []
+
+    def should_retry(self, attempt: int, error: Optional[Exception] = None, error_str: Optional[str] = None) -> bool:
+        if attempt >= self.max_retries:
+            return False
+        if not self.retry_on_exceptions and not self.retry_on_error_substrings:
+            return True
+        if error is not None and any(isinstance(error, exc) for exc in self.retry_on_exceptions):
+            return True
+        if error_str and any(substr in error_str for substr in self.retry_on_error_substrings):
+            return True
+        return False
+
+    def get_delay(self, attempt: int) -> float:
+        delay = min(self.base_delay * (2 ** attempt), self.max_delay)
+        if self.jitter:
+            delay *= (0.5 + random.random())
+        return delay
+
+
+class RetryableToolExecutor:
+    """
+    Wrapper for a tool executor that applies retry logic.
+    """
+    def __init__(
+        self,
+        executor: Any,
+        default_config: RetryConfig = None,
+        tool_configs: Dict[str, RetryConfig] = None
+    ):
+        self.executor = executor
+        self.default_config = default_config or RetryConfig()
+        self.tool_configs = tool_configs or {}
+
+    def _get_config(self, tool: str) -> RetryConfig:
+        return self.tool_configs.get(tool, self.default_config)
+
+    async def execute(
+        self,
+        calls: List[ToolCall],
+        timeout: Optional[float] = None
+    ) -> List[ToolResult]:
+        results: List[ToolResult] = []
+        for call in calls:
+            config = self._get_config(call.tool)
+            result = await self._execute_with_retry(call, config, timeout)
+            results.append(result)
+        return results
+
+    async def _execute_with_retry(
+        self,
+        call: ToolCall,
+        config: RetryConfig,
+        timeout: Optional[float]
+    ) -> ToolResult:
+        attempt = 0
+        last_error: Optional[str] = None
+        pid = 0
+        machine = "unknown"
+
+        while True:
+            start_time = datetime.now(timezone.utc)
+            try:
+                # execute call
+                tool_results = await self.executor.execute([call], timeout=timeout)
+                result = tool_results[0]
+                pid = result.pid
+                machine = result.machine
+
+                # error in result
+                if result.error:
+                    last_error = result.error
+                    if config.should_retry(attempt, error_str=result.error):
+                        logger.info(
+                            f"Retrying tool {call.tool} after error: {result.error} (attempt {attempt + 1})"
+                        )
+                        await asyncio.sleep(config.get_delay(attempt))
+                        attempt += 1
+                        continue
+                    # no retry: if any retries happened, wrap final error
+                    if attempt > 0:
+                        end_time = datetime.now(timezone.utc)
+                        final = ToolResult(
+                            tool=call.tool,
+                            result=None,
+                            error=f"Max retries reached ({config.max_retries}): {last_error}",
+                            start_time=start_time,
+                            end_time=end_time,
+                            machine=machine,
+                            pid=pid
+                        )
+                        # attach attempts
+                        object.__setattr__(final, 'attempts', attempt)
+                        return final
+                    # no retries occurred, return the original failure
+                    return result
+
+                # success: attach attempts and return
+                object.__setattr__(result, 'attempts', attempt)
+                return result
+            except Exception as e:
+                err_str = str(e)
+                last_error = err_str
+                if config.should_retry(attempt, error=e):
+                    logger.info(
+                        f"Retrying tool {call.tool} after exception: {err_str} (attempt {attempt + 1})"
+                    )
+                    await asyncio.sleep(config.get_delay(attempt))
+                    attempt += 1
+                    continue
+                # no more retries: return error result
+                end_time = datetime.now(timezone.utc)
+                final_exc = ToolResult(
+                    tool=call.tool,
+                    result=None,
+                    error=err_str,
+                    start_time=start_time,
+                    end_time=end_time,
+                    machine=machine,
+                    pid=pid
+                )
+                object.__setattr__(final_exc, 'attempts', attempt + 1)
+                return final_exc
+
+
+def retryable(
+    max_retries: int = 3,
+    base_delay: float = 1.0,
+    max_delay: float = 60.0,
+    jitter: bool = True,
+    retry_on_exceptions: Optional[List[Type[Exception]]] = None,
+    retry_on_error_substrings: Optional[List[str]] = None
+):
+    def decorator(cls):
+        cls._retry_config = RetryConfig(
+            max_retries=max_retries,
+            base_delay=base_delay,
+            max_delay=max_delay,
+            jitter=jitter,
+            retry_on_exceptions=retry_on_exceptions,
+            retry_on_error_substrings=retry_on_error_substrings
+        )
+        return cls
+    return decorator
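
And a hedged sketch of the retry wrapper above around a deliberately flaky inner executor; FlakyExecutor, the delays, and the db_query tool name are invented for the example.

import asyncio

from chuk_tool_processor.execution.wrappers.retry import RetryConfig, RetryableToolExecutor
from chuk_tool_processor.models.tool_call import ToolCall
from chuk_tool_processor.models.tool_result import ToolResult


class FlakyExecutor:
    # Hypothetical inner executor that fails twice before succeeding.
    def __init__(self):
        self.calls_seen = 0

    async def execute(self, calls, timeout=None):
        self.calls_seen += 1
        if self.calls_seen < 3:
            return [ToolResult(tool=c.tool, error="temporary failure") for c in calls]
        return [ToolResult(tool=c.tool, result="ok") for c in calls]


async def main():
    executor = RetryableToolExecutor(
        executor=FlakyExecutor(),
        default_config=RetryConfig(max_retries=3, base_delay=0.1),
        tool_configs={"db_query": RetryConfig(max_retries=5, base_delay=0.1)},  # per-tool override
    )
    results = await executor.execute([ToolCall(tool="db_query", arguments={"sql": "SELECT 1"})])
    # Succeeds on the third attempt; the wrapper attaches the retry count.
    print(results[0].result, getattr(results[0], "attempts", None))  # ok 2


asyncio.run(main())
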
@@ -0,0 +1 @@
+# chuk_tool_processor/models/__init__.py
@@ -0,0 +1,19 @@
+# chuk_tool_processor/execution/execution_strategy.py
+from abc import ABC, abstractmethod
+from typing import List, Optional
+
+from chuk_tool_processor.models.tool_call import ToolCall
+from chuk_tool_processor.models.tool_result import ToolResult
+
+
+class ExecutionStrategy(ABC):
+    """
+    Strategy interface for executing ToolCall objects.
+    """
+    @abstractmethod
+    async def run(
+        self,
+        calls: List[ToolCall],
+        timeout: Optional[float] = None
+    ) -> List[ToolResult]:
+        pass
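
To illustrate the contract above, here is a hedged sketch of a minimal strategy that resolves tools from a plain dict of async callables; DictStrategy and the add tool are invented for this example, while the package's own inprocess_strategy.py and subprocess_strategy.py are the real implementations.

import asyncio
from typing import Callable, Dict, List, Optional

from chuk_tool_processor.models.execution_strategy import ExecutionStrategy
from chuk_tool_processor.models.tool_call import ToolCall
from chuk_tool_processor.models.tool_result import ToolResult


class DictStrategy(ExecutionStrategy):
    # Hypothetical strategy: looks tools up in a dict instead of a registry.
    def __init__(self, tools: Dict[str, Callable]):
        self.tools = tools

    async def run(self, calls: List[ToolCall], timeout: Optional[float] = None) -> List[ToolResult]:
        results: List[ToolResult] = []
        for call in calls:
            fn = self.tools.get(call.tool)
            if fn is None:
                results.append(ToolResult(tool=call.tool, error=f"Tool not found: {call.tool}"))
                continue
            try:
                value = await asyncio.wait_for(fn(**call.arguments), timeout)
                results.append(ToolResult(tool=call.tool, result=value))
            except Exception as exc:  # timeouts and tool errors become error results
                results.append(ToolResult(tool=call.tool, error=str(exc)))
        return results


async def add(a: int, b: int) -> int:
    return a + b


async def main():
    strategy = DictStrategy({"add": add})
    results = await strategy.run([ToolCall(tool="add", arguments={"a": 2, "b": 3})])
    print(results[0].result)  # 5


asyncio.run(main())
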
@@ -0,0 +1,7 @@
+# chuk_tool_processor/models/tool_call.py
+from pydantic import BaseModel, Field
+from typing import Any, Dict
+
+class ToolCall(BaseModel):
+    tool: str = Field(..., min_length=1, description="Name of the tool to call; must be non‐empty")
+    arguments: Dict[str, Any] = Field(default_factory=dict)
@@ -0,0 +1,49 @@
+# chuk_tool_processor/models/tool_result.py
+import os
+from pydantic import BaseModel, Field, ConfigDict
+from typing import Any, Optional
+from datetime import datetime, timezone
+
+class ToolResult(BaseModel):
+    """
+    Represents the result of executing a tool.
+    Includes timing, host, and process metadata for diagnostics.
+    """
+    # Configure Pydantic to ignore any extra fields
+    model_config = ConfigDict(extra='ignore')
+
+    # Flag indicating whether this result was retrieved from cache
+    cached: bool = Field(
+        default=False,
+        description="True if this result was retrieved from cache"
+    )
+
+    tool: str = Field(
+        ...,
+        min_length=1,
+        description="Name of the tool; must be non-empty"
+    )
+    result: Any = Field(
+        None,
+        description="Return value from the tool execution"
+    )
+    error: Optional[str] = Field(
+        None,
+        description="Error message if execution failed"
+    )
+    start_time: datetime = Field(
+        default_factory=lambda: datetime.now(timezone.utc),
+        description="UTC timestamp when execution started"
+    )
+    end_time: datetime = Field(
+        default_factory=lambda: datetime.now(timezone.utc),
+        description="UTC timestamp when execution finished"
+    )
+    machine: str = Field(
+        default_factory=lambda: os.uname().nodename,
+        description="Hostname where the tool ran"
+    )
+    pid: int = Field(
+        default_factory=lambda: os.getpid(),
+        description="Process ID of the worker"
+    )
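
Finally, a small hedged sketch of constructing the two models above directly; all field values here are illustrative.

from chuk_tool_processor.models.tool_call import ToolCall
from chuk_tool_processor.models.tool_result import ToolResult

call = ToolCall(tool="search", arguments={"q": "python"})

# Success: timestamps, hostname, and pid are filled in by the field defaults.
ok = ToolResult(tool=call.tool, result={"hits": 3})

# Failure: error is set and result stays None.
failed = ToolResult(tool=call.tool, error="upstream timed out")

print(ok.cached, ok.pid)            # False <current pid>
print(failed.error, failed.result)  # upstream timed out None
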
@@ -0,0 +1 @@
+# chuk_tool_processor/plugins/parsers__init__.py