chuk-tool-processor 0.6.4__py3-none-any.whl → 0.9.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release of chuk-tool-processor has been flagged as potentially problematic.

Files changed (66)
  1. chuk_tool_processor/core/__init__.py +32 -1
  2. chuk_tool_processor/core/exceptions.py +225 -13
  3. chuk_tool_processor/core/processor.py +135 -104
  4. chuk_tool_processor/execution/strategies/__init__.py +6 -0
  5. chuk_tool_processor/execution/strategies/inprocess_strategy.py +142 -150
  6. chuk_tool_processor/execution/strategies/subprocess_strategy.py +202 -206
  7. chuk_tool_processor/execution/tool_executor.py +82 -84
  8. chuk_tool_processor/execution/wrappers/__init__.py +42 -0
  9. chuk_tool_processor/execution/wrappers/caching.py +150 -116
  10. chuk_tool_processor/execution/wrappers/circuit_breaker.py +370 -0
  11. chuk_tool_processor/execution/wrappers/rate_limiting.py +76 -43
  12. chuk_tool_processor/execution/wrappers/retry.py +116 -78
  13. chuk_tool_processor/logging/__init__.py +23 -17
  14. chuk_tool_processor/logging/context.py +40 -45
  15. chuk_tool_processor/logging/formatter.py +22 -21
  16. chuk_tool_processor/logging/helpers.py +28 -42
  17. chuk_tool_processor/logging/metrics.py +13 -15
  18. chuk_tool_processor/mcp/__init__.py +8 -12
  19. chuk_tool_processor/mcp/mcp_tool.py +158 -114
  20. chuk_tool_processor/mcp/register_mcp_tools.py +22 -22
  21. chuk_tool_processor/mcp/setup_mcp_http_streamable.py +57 -17
  22. chuk_tool_processor/mcp/setup_mcp_sse.py +57 -17
  23. chuk_tool_processor/mcp/setup_mcp_stdio.py +11 -11
  24. chuk_tool_processor/mcp/stream_manager.py +333 -276
  25. chuk_tool_processor/mcp/transport/__init__.py +22 -29
  26. chuk_tool_processor/mcp/transport/base_transport.py +180 -44
  27. chuk_tool_processor/mcp/transport/http_streamable_transport.py +505 -325
  28. chuk_tool_processor/mcp/transport/models.py +100 -0
  29. chuk_tool_processor/mcp/transport/sse_transport.py +607 -276
  30. chuk_tool_processor/mcp/transport/stdio_transport.py +597 -116
  31. chuk_tool_processor/models/__init__.py +21 -1
  32. chuk_tool_processor/models/execution_strategy.py +16 -21
  33. chuk_tool_processor/models/streaming_tool.py +28 -25
  34. chuk_tool_processor/models/tool_call.py +49 -31
  35. chuk_tool_processor/models/tool_export_mixin.py +22 -8
  36. chuk_tool_processor/models/tool_result.py +40 -77
  37. chuk_tool_processor/models/tool_spec.py +350 -0
  38. chuk_tool_processor/models/validated_tool.py +36 -18
  39. chuk_tool_processor/observability/__init__.py +30 -0
  40. chuk_tool_processor/observability/metrics.py +312 -0
  41. chuk_tool_processor/observability/setup.py +105 -0
  42. chuk_tool_processor/observability/tracing.py +345 -0
  43. chuk_tool_processor/plugins/__init__.py +1 -1
  44. chuk_tool_processor/plugins/discovery.py +11 -11
  45. chuk_tool_processor/plugins/parsers/__init__.py +1 -1
  46. chuk_tool_processor/plugins/parsers/base.py +1 -2
  47. chuk_tool_processor/plugins/parsers/function_call_tool.py +13 -8
  48. chuk_tool_processor/plugins/parsers/json_tool.py +4 -3
  49. chuk_tool_processor/plugins/parsers/openai_tool.py +12 -7
  50. chuk_tool_processor/plugins/parsers/xml_tool.py +4 -4
  51. chuk_tool_processor/registry/__init__.py +12 -12
  52. chuk_tool_processor/registry/auto_register.py +22 -30
  53. chuk_tool_processor/registry/decorators.py +127 -129
  54. chuk_tool_processor/registry/interface.py +26 -23
  55. chuk_tool_processor/registry/metadata.py +27 -22
  56. chuk_tool_processor/registry/provider.py +17 -18
  57. chuk_tool_processor/registry/providers/__init__.py +16 -19
  58. chuk_tool_processor/registry/providers/memory.py +18 -25
  59. chuk_tool_processor/registry/tool_export.py +42 -51
  60. chuk_tool_processor/utils/validation.py +15 -16
  61. chuk_tool_processor-0.9.7.dist-info/METADATA +1813 -0
  62. chuk_tool_processor-0.9.7.dist-info/RECORD +67 -0
  63. chuk_tool_processor-0.6.4.dist-info/METADATA +0 -697
  64. chuk_tool_processor-0.6.4.dist-info/RECORD +0 -60
  65. {chuk_tool_processor-0.6.4.dist-info → chuk_tool_processor-0.9.7.dist-info}/WHEEL +0 -0
  66. {chuk_tool_processor-0.6.4.dist-info → chuk_tool_processor-0.9.7.dist-info}/top_level.txt +0 -0
@@ -11,31 +11,50 @@ This module provides:
  Results retrieved from cache are marked with `cached=True` and `machine="cache"`
  for easy detection.
  """
+
  from __future__ import annotations

  import asyncio
  import hashlib
  import json
- import logging
  from abc import ABC, abstractmethod
- from datetime import datetime, timedelta, timezone
- from typing import Any, Dict, List, Optional, Tuple, Set, Union
+ from datetime import UTC, datetime, timedelta
+ from typing import Any

  from pydantic import BaseModel, Field

+ from chuk_tool_processor.logging import get_logger
  from chuk_tool_processor.models.tool_call import ToolCall
  from chuk_tool_processor.models.tool_result import ToolResult
- from chuk_tool_processor.logging import get_logger

  logger = get_logger("chuk_tool_processor.execution.wrappers.caching")

+ # Optional observability imports
+ try:
+ from chuk_tool_processor.observability.metrics import get_metrics
+ from chuk_tool_processor.observability.tracing import trace_cache_operation
+
+ _observability_available = True
+ except ImportError:
+ _observability_available = False
+
+ # No-op functions when observability not available
+ def get_metrics():
+ return None
+
+ def trace_cache_operation(*_args, **_kwargs):
+ from contextlib import nullcontext
+
+ return nullcontext()
+
+
  # --------------------------------------------------------------------------- #
  # Cache primitives
  # --------------------------------------------------------------------------- #
  class CacheEntry(BaseModel):
  """
  Model representing a cached tool result.
-
+
  Attributes:
  tool: Name of the tool
  arguments_hash: Hash of the tool arguments
@@ -43,29 +62,30 @@ class CacheEntry(BaseModel):
  created_at: When the entry was created
  expires_at: When the entry expires (None = no expiration)
  """
+
  tool: str = Field(..., description="Tool name")
  arguments_hash: str = Field(..., description="MD5 hash of arguments")
  result: Any = Field(..., description="Cached result value")
  created_at: datetime = Field(..., description="Creation timestamp")
- expires_at: Optional[datetime] = Field(None, description="Expiration timestamp")
+ expires_at: datetime | None = Field(None, description="Expiration timestamp")


  class CacheInterface(ABC):
  """
  Abstract interface for tool result caches.
-
+
  All cache implementations must be async-native and thread-safe.
  """

  @abstractmethod
- async def get(self, tool: str, arguments_hash: str) -> Optional[Any]:
+ async def get(self, tool: str, arguments_hash: str) -> Any | None:
  """
  Get a cached result by tool name and arguments hash.
-
+
  Args:
  tool: Tool name
  arguments_hash: Hash of the arguments
-
+
  Returns:
  Cached result value or None if not found
  """
@@ -78,11 +98,11 @@ class CacheInterface(ABC):
  arguments_hash: str,
  result: Any,
  *,
- ttl: Optional[int] = None,
+ ttl: int | None = None,
  ) -> None:
  """
  Set a cache entry.
-
+
  Args:
  tool: Tool name
  arguments_hash: Hash of the arguments
@@ -92,29 +112,29 @@ class CacheInterface(ABC):
  pass

  @abstractmethod
- async def invalidate(self, tool: str, arguments_hash: Optional[str] = None) -> None:
+ async def invalidate(self, tool: str, arguments_hash: str | None = None) -> None:
  """
  Invalidate cache entries.
-
+
  Args:
  tool: Tool name
  arguments_hash: Optional arguments hash. If None, all entries for the tool are invalidated.
  """
  pass
-
+
  async def clear(self) -> None:
  """
  Clear all cache entries.
-
+
  Default implementation raises NotImplementedError.
  Override in subclasses to provide an efficient implementation.
  """
  raise NotImplementedError("Cache clear not implemented")
-
- async def get_stats(self) -> Dict[str, Any]:
+
+ async def get_stats(self) -> dict[str, Any]:
  """
  Get cache statistics.
-
+
  Returns:
  Dict with cache statistics (implementation-specific)
  """
@@ -124,46 +144,46 @@ class CacheInterface(ABC):
  class InMemoryCache(CacheInterface):
  """
  In-memory cache implementation with async thread-safety.
-
+
  This cache uses a two-level dictionary structure with asyncio locks
  to ensure thread safety. Entries can have optional TTL values.
  """

- def __init__(self, default_ttl: Optional[int] = 300) -> None:
+ def __init__(self, default_ttl: int | None = 300) -> None:
  """
  Initialize the in-memory cache.
-
+
  Args:
  default_ttl: Default time-to-live in seconds (None = no expiration)
  """
- self._cache: Dict[str, Dict[str, CacheEntry]] = {}
+ self._cache: dict[str, dict[str, CacheEntry]] = {}
  self._default_ttl = default_ttl
  self._lock = asyncio.Lock()
- self._stats: Dict[str, int] = {
+ self._stats: dict[str, int] = {
  "hits": 0,
  "misses": 0,
  "sets": 0,
  "invalidations": 0,
  "expirations": 0,
  }
-
+
  logger.debug(f"Initialized InMemoryCache with default_ttl={default_ttl}s")

  # ---------------------- Helper methods ------------------------ #
  def _is_expired(self, entry: CacheEntry) -> bool:
  """Check if an entry is expired."""
  return entry.expires_at is not None and entry.expires_at < datetime.now()
-
+
  async def _prune_expired(self) -> int:
  """
  Remove all expired entries.
-
+
  Returns:
  Number of entries removed
  """
  now = datetime.now()
  removed = 0
-
+
  async with self._lock:
  for tool in list(self._cache.keys()):
  tool_cache = self._cache[tool]
@@ -173,42 +193,42 @@ class InMemoryCache(CacheInterface):
  del tool_cache[arg_hash]
  removed += 1
  self._stats["expirations"] += 1
-
+
  # Remove empty tool caches
  if not tool_cache:
  del self._cache[tool]
-
+
  return removed

  # ---------------------- CacheInterface implementation ------------------------ #
- async def get(self, tool: str, arguments_hash: str) -> Optional[Any]:
+ async def get(self, tool: str, arguments_hash: str) -> Any | None:
  """
  Get a cached result, checking expiration.
-
+
  Args:
  tool: Tool name
  arguments_hash: Hash of the arguments
-
+
  Returns:
  Cached result value or None if not found or expired
  """
  async with self._lock:
  entry = self._cache.get(tool, {}).get(arguments_hash)
-
+
  if not entry:
  self._stats["misses"] += 1
  return None
-
+
  if self._is_expired(entry):
  # Prune expired entry
  del self._cache[tool][arguments_hash]
  if not self._cache[tool]:
  del self._cache[tool]
-
+
  self._stats["expirations"] += 1
  self._stats["misses"] += 1
  return None
-
+
  self._stats["hits"] += 1
  return entry.result

@@ -218,11 +238,11 @@ class InMemoryCache(CacheInterface):
  arguments_hash: str,
  result: Any,
  *,
- ttl: Optional[int] = None,
+ ttl: int | None = None,
  ) -> None:
  """
  Set a cache entry with optional custom TTL.
-
+
  Args:
  tool: Tool name
  arguments_hash: Hash of the arguments
@@ -231,11 +251,11 @@ class InMemoryCache(CacheInterface):
  """
  async with self._lock:
  now = datetime.now()
-
+
  # Calculate expiration
  use_ttl = ttl if ttl is not None else self._default_ttl
  expires_at = now + timedelta(seconds=use_ttl) if use_ttl is not None else None
-
+
  # Create entry
  entry = CacheEntry(
  tool=tool,
@@ -244,20 +264,17 @@ class InMemoryCache(CacheInterface):
  created_at=now,
  expires_at=expires_at,
  )
-
+
  # Store in cache
  self._cache.setdefault(tool, {})[arguments_hash] = entry
  self._stats["sets"] += 1
-
- logger.debug(
- f"Cached result for {tool} (TTL: "
- f"{use_ttl if use_ttl is not None else 'none'}s)"
- )

- async def invalidate(self, tool: str, arguments_hash: Optional[str] = None) -> None:
+ logger.debug(f"Cached result for {tool} (TTL: {use_ttl if use_ttl is not None else 'none'}s)")
+
+ async def invalidate(self, tool: str, arguments_hash: str | None = None) -> None:
  """
  Invalidate cache entries for a tool.
-
+
  Args:
  tool: Tool name
  arguments_hash: Optional arguments hash. If None, all entries for the tool are invalidated.
@@ -265,7 +282,7 @@ class InMemoryCache(CacheInterface):
  async with self._lock:
  if tool not in self._cache:
  return
-
+
  if arguments_hash:
  # Invalidate specific entry
  self._cache[tool].pop(arguments_hash, None)
@@ -279,7 +296,7 @@ class InMemoryCache(CacheInterface):
  del self._cache[tool]
  self._stats["invalidations"] += count
  logger.debug(f"Invalidated all cache entries for {tool} ({count} entries)")
-
+
  async def clear(self) -> None:
  """Clear all cache entries."""
  async with self._lock:
@@ -287,11 +304,11 @@ class InMemoryCache(CacheInterface):
  self._cache.clear()
  self._stats["invalidations"] += count
  logger.debug(f"Cleared entire cache ({count} entries)")
-
- async def get_stats(self) -> Dict[str, Any]:
+
+ async def get_stats(self) -> dict[str, Any]:
  """
  Get cache statistics.
-
+
  Returns:
  Dict with hits, misses, sets, invalidations, and entry counts
  """
@@ -300,20 +317,21 @@ class InMemoryCache(CacheInterface):
  stats["implemented"] = True
  stats["entry_count"] = sum(len(entries) for entries in self._cache.values())
  stats["tool_count"] = len(self._cache)
-
+
  # Calculate hit rate
  total_gets = stats["hits"] + stats["misses"]
  stats["hit_rate"] = stats["hits"] / total_gets if total_gets > 0 else 0.0
-
+
  return stats

+
  # --------------------------------------------------------------------------- #
  # Executor wrapper
  # --------------------------------------------------------------------------- #
  class CachingToolExecutor:
  """
  Executor wrapper that transparently caches successful tool results.
-
+
  This wrapper intercepts tool calls, checks if results are available in cache,
  and only executes uncached calls. Successful results are automatically stored
  in the cache for future use.
@@ -324,13 +342,13 @@ class CachingToolExecutor:
  executor: Any,
  cache: CacheInterface,
  *,
- default_ttl: Optional[int] = None,
- tool_ttls: Optional[Dict[str, int]] = None,
- cacheable_tools: Optional[List[str]] = None,
+ default_ttl: int | None = None,
+ tool_ttls: dict[str, int] | None = None,
+ cacheable_tools: list[str] | None = None,
  ) -> None:
  """
  Initialize the caching executor.
-
+
  Args:
  executor: The underlying executor to wrap
  cache: Cache implementation to use
@@ -343,51 +361,50 @@ class CachingToolExecutor:
  self.default_ttl = default_ttl
  self.tool_ttls = tool_ttls or {}
  self.cacheable_tools = set(cacheable_tools) if cacheable_tools else None
-
+
  logger.debug(
- f"Initialized CachingToolExecutor with {len(self.tool_ttls)} custom TTLs, "
- f"default TTL={default_ttl}s"
+ f"Initialized CachingToolExecutor with {len(self.tool_ttls)} custom TTLs, default TTL={default_ttl}s"
  )

  # ---------------------------- helpers ----------------------------- #
  @staticmethod
- def _hash_arguments(arguments: Dict[str, Any]) -> str:
+ def _hash_arguments(arguments: dict[str, Any]) -> str:
  """
  Generate a stable hash for tool arguments.
-
+
  Args:
  arguments: Tool arguments dict
-
+
  Returns:
  MD5 hash of the sorted JSON representation
  """
  try:
  blob = json.dumps(arguments, sort_keys=True, default=str)
- return hashlib.md5(blob.encode()).hexdigest()
+ return hashlib.md5(blob.encode(), usedforsecurity=False).hexdigest() # nosec B324
  except Exception as e:
  logger.warning(f"Error hashing arguments: {e}")
  # Fallback to a string representation
- return hashlib.md5(str(arguments).encode()).hexdigest()
+ return hashlib.md5(str(arguments).encode(), usedforsecurity=False).hexdigest() # nosec B324

  def _is_cacheable(self, tool: str) -> bool:
  """
  Check if a tool is cacheable.
-
+
  Args:
  tool: Tool name
-
+
  Returns:
  True if the tool should be cached, False otherwise
  """
  return self.cacheable_tools is None or tool in self.cacheable_tools

- def _ttl_for(self, tool: str) -> Optional[int]:
+ def _ttl_for(self, tool: str) -> int | None:
  """
  Get the TTL for a specific tool.
-
+
  Args:
  tool: Tool name
-
+
  Returns:
  Tool-specific TTL or default TTL
  """
@@ -396,31 +413,31 @@ class CachingToolExecutor:
  # ------------------------------ API ------------------------------- #
  async def execute(
  self,
- calls: List[ToolCall],
+ calls: list[ToolCall],
  *,
- timeout: Optional[float] = None,
+ timeout: float | None = None,
  use_cache: bool = True,
- ) -> List[ToolResult]:
+ ) -> list[ToolResult]:
  """
  Execute tool calls with caching.
-
+
  Args:
  calls: List of tool calls to execute
  timeout: Optional timeout for execution
  use_cache: Whether to use cached results
-
+
  Returns:
  List of tool results in the same order as calls
  """
  # Handle empty calls
  if not calls:
  return []
-
+
  # ------------------------------------------------------------------
  # 1. Split calls into cached / uncached buckets
  # ------------------------------------------------------------------
- cached_hits: List[Tuple[int, ToolResult]] = []
- uncached: List[Tuple[int, ToolCall]] = []
+ cached_hits: list[tuple[int, ToolResult]] = []
+ uncached: list[tuple[int, ToolCall]] = []

  if use_cache:
  for idx, call in enumerate(calls):
@@ -428,10 +445,19 @@ class CachingToolExecutor:
  logger.debug(f"Tool {call.tool} is not cacheable, executing directly")
  uncached.append((idx, call))
  continue
-
- h = self._hash_arguments(call.arguments)
- cached_val = await self.cache.get(call.tool, h)
-
+
+ # Use idempotency_key if available, otherwise hash arguments
+ cache_key = call.idempotency_key or self._hash_arguments(call.arguments)
+
+ # Trace cache lookup operation
+ with trace_cache_operation("lookup", call.tool):
+ cached_val = await self.cache.get(call.tool, cache_key)
+
+ # Record metrics
+ metrics = get_metrics()
+ if metrics:
+ metrics.record_cache_operation(call.tool, "lookup", hit=(cached_val is not None))
+
  if cached_val is None:
  # Cache miss
  logger.debug(f"Cache miss for {call.tool}")
@@ -439,7 +465,7 @@ class CachingToolExecutor:
  else:
  # Cache hit
  logger.debug(f"Cache hit for {call.tool}")
- now = datetime.now(timezone.utc)
+ now = datetime.now(UTC)
  cached_hits.append(
  (
  idx,
@@ -473,37 +499,41 @@ class CachingToolExecutor:
  executor_kwargs = {"timeout": timeout}
  if hasattr(self.executor, "use_cache"):
  executor_kwargs["use_cache"] = False
-
- uncached_results = await self.executor.execute(
- [call for _, call in uncached], **executor_kwargs
- )
+
+ uncached_results = await self.executor.execute([call for _, call in uncached], **executor_kwargs)

  # ------------------------------------------------------------------
  # 3. Insert fresh results into cache
  # ------------------------------------------------------------------
  if use_cache:
  cache_tasks = []
- for (idx, call), result in zip(uncached, uncached_results):
+ metrics = get_metrics()
+
+ for (_idx, call), result in zip(uncached, uncached_results, strict=False):
  if result.error is None and self._is_cacheable(call.tool):
  ttl = self._ttl_for(call.tool)
  logger.debug(f"Caching result for {call.tool} with TTL={ttl}s")
-
- # Create task but don't await yet (for concurrent caching)
- task = self.cache.set(
- call.tool,
- self._hash_arguments(call.arguments),
- result.result,
- ttl=ttl,
- )
- cache_tasks.append(task)
-
+
+ # Use idempotency_key if available, otherwise hash arguments
+ cache_key = call.idempotency_key or self._hash_arguments(call.arguments)
+
+ # Trace and record cache set operation
+ # Bind loop variables to avoid B023 error
+ async def cache_with_trace(tool=call.tool, key=cache_key, value=result.result, ttl_val=ttl):
+ with trace_cache_operation("set", tool, attributes={"ttl": ttl_val}):
+ await self.cache.set(tool, key, value, ttl=ttl_val)
+ if metrics:
+ metrics.record_cache_operation(tool, "set")
+
+ cache_tasks.append(cache_with_trace())
+
  # Flag as non-cached so callers can tell
  if hasattr(result, "cached"):
  result.cached = False
  else:
  # For older ToolResult objects that might not have cached attribute
- setattr(result, "cached", False)
-
+ result.cached = False
+
  # Wait for all cache operations to complete
  if cache_tasks:
  await asyncio.gather(*cache_tasks)
@@ -511,10 +541,10 @@ class CachingToolExecutor:
  # ------------------------------------------------------------------
  # 4. Merge cached-hits + fresh results in original order
  # ------------------------------------------------------------------
- merged: List[Optional[ToolResult]] = [None] * len(calls)
+ merged: list[ToolResult | None] = [None] * len(calls)
  for idx, hit in cached_hits:
  merged[idx] = hit
- for (idx, _), fresh in zip(uncached, uncached_results):
+ for (idx, _), fresh in zip(uncached, uncached_results, strict=False):
  merged[idx] = fresh

  # If calls was empty, merged remains []
@@ -524,22 +554,23 @@ class CachingToolExecutor:
  # --------------------------------------------------------------------------- #
  # Convenience decorators
  # --------------------------------------------------------------------------- #
- def cacheable(ttl: Optional[int] = None):
+ def cacheable(ttl: int | None = None):
  """
  Decorator to mark a tool class as cacheable.
-
+
  Example:
  @cacheable(ttl=600) # Cache for 10 minutes
  class WeatherTool:
  async def execute(self, location: str) -> Dict[str, Any]:
  # Implementation
-
+
  Args:
  ttl: Optional custom time-to-live in seconds
-
+
  Returns:
  Decorated class with caching metadata
  """
+
  def decorator(cls):
  cls._cacheable = True # Runtime flag picked up by higher-level code
  if ttl is not None:
@@ -549,28 +580,31 @@ def cacheable(ttl: Optional[int] = None):
  return decorator


- def invalidate_cache(tool: str, arguments: Optional[Dict[str, Any]] = None):
+ def invalidate_cache(tool: str, arguments: dict[str, Any] | None = None):
  """
  Create an async function that invalidates specific cache entries.
-
+
  Example:
  invalidator = invalidate_cache("weather", {"location": "London"})
  await invalidator(cache) # Call with a cache instance
-
+
  Args:
  tool: Tool name
  arguments: Optional arguments dict. If None, all entries for the tool are invalidated.
-
+
  Returns:
  Async function that takes a cache instance and invalidates entries
  """
+
  async def _invalidate(cache: CacheInterface):
  if arguments is not None:
- h = hashlib.md5(json.dumps(arguments, sort_keys=True, default=str).encode()).hexdigest()
+ h = hashlib.md5(
+ json.dumps(arguments, sort_keys=True, default=str).encode(), usedforsecurity=False
+ ).hexdigest() # nosec B324
  await cache.invalidate(tool, h)
  logger.debug(f"Invalidated cache entry for {tool} with specific arguments")
  else:
  await cache.invalidate(tool)
  logger.debug(f"Invalidated all cache entries for {tool}")

- return _invalidate
+ return _invalidate
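
Taken together, the caching changes above (idempotency_key-aware cache keys, per-tool TTLs, and the optional observability hooks) wire up roughly as sketched below. This is an illustrative sketch based only on the API visible in this diff: the `inner_executor` placeholder and the exact `ToolCall` constructor fields are assumptions, not something the diff confirms.

    # Minimal usage sketch (illustrative only, based on the API visible in this diff).
    # `inner_executor` is a placeholder for an existing strategy-backed executor;
    # its construction is not shown in this diff and is assumed here.
    from chuk_tool_processor.execution.wrappers.caching import (
        CachingToolExecutor,
        InMemoryCache,
        cacheable,
        invalidate_cache,
    )
    from chuk_tool_processor.models.tool_call import ToolCall


    @cacheable(ttl=600)  # per-class TTL metadata picked up by higher-level code
    class WeatherTool:
        async def execute(self, location: str) -> dict:
            ...


    async def demo(inner_executor) -> None:
        cache = InMemoryCache(default_ttl=300)
        executor = CachingToolExecutor(
            inner_executor,
            cache,
            tool_ttls={"weather": 600},    # per-tool TTL override
            cacheable_tools=["weather"],   # only these tools are cached
        )

        # ToolCall field names are assumed from the attribute access seen in the diff
        # (call.tool / call.arguments / call.idempotency_key).
        call = ToolCall(tool="weather", arguments={"location": "London"})

        await executor.execute([call])             # executes and stores the result
        results = await executor.execute([call])   # second run is served from cache

        # Drop the cached entry for this exact argument set.
        await invalidate_cache("weather", {"location": "London"})(cache)

Per the module docstring in this diff, results served from cache are marked with cached=True and machine="cache", so cache hits remain distinguishable from fresh executions in the returned ToolResult objects.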