cacheado-1.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cache.py +678 -0
- cache_policies/__init__.py +0 -0
- cache_policies/cache_policy_manager.py +187 -0
- cache_scopes/__init__.py +0 -0
- cache_scopes/scope_config.py +228 -0
- cache_types.py +6 -0
- cacheado-1.0.1.dist-info/METADATA +553 -0
- cacheado-1.0.1.dist-info/RECORD +21 -0
- cacheado-1.0.1.dist-info/WHEEL +5 -0
- cacheado-1.0.1.dist-info/licenses/LICENSE +21 -0
- cacheado-1.0.1.dist-info/top_level.txt +7 -0
- eviction_policies/__init__.py +0 -0
- eviction_policies/lre_eviction.py +130 -0
- protocols/__init__.py +0 -0
- protocols/cache.py +183 -0
- protocols/cache_policy_manager_protocol.py +82 -0
- protocols/eviction_policy.py +70 -0
- protocols/scope.py +85 -0
- protocols/storage_provider.py +67 -0
- storages/__init__.py +0 -0
- storages/in_memory.py +109 -0
cache.py
ADDED
@@ -0,0 +1,678 @@
import asyncio
import logging
import pickle
import threading
import time
from collections import defaultdict
from functools import wraps
from typing import Any, Callable, DefaultDict, Dict, List, Optional, Tuple, Union

from typing_extensions import ParamSpec, TypeVar

from cache_scopes.scope_config import ScopeConfig
from cache_types import _CacheKey, _CacheScope, _CacheValue
from protocols.cache_policy_manager_protocol import ICachePolicyManager
from protocols.storage_provider import IStorageProvider

P = ParamSpec("P")
T = TypeVar("T")


class Cache:
    """
    Cache implementation using Dependency Injection instead of Singleton pattern.

    This class is the central orchestrator. It manages:
    - Tenancy (scoping)
    - Stampede Protection (calculation locks)
    - Statistics (hits/misses)

    It delegates storage to an injected IStorageProvider and
    eviction/cleanup to an injected IEvictionPolicy via the
    CachePolicyManager.
    """

    __slots__ = (
        "_storage",
        "_policy_manager",
        "_scope_config",
        "_calculation_locks",
        "_hits",
        "_misses",
        "_evictions",
        "_instance_lock",
    )

    def __init__(self) -> None:
        """Initializes the cache orchestrator's state."""
        self._storage: Optional[IStorageProvider] = None
        self._policy_manager: Optional[ICachePolicyManager] = None
        self._scope_config: Optional[ScopeConfig] = None
        self._calculation_locks: DefaultDict[_CacheKey, threading.Lock] = defaultdict(threading.Lock)
        self._hits: int = 0
        self._misses: int = 0
        self._evictions: int = 0
        self._instance_lock: threading.Lock = threading.Lock()

    def configure(self, backend: IStorageProvider, policy_manager: ICachePolicyManager, scope_config: ScopeConfig) -> None:
        """
        Configures and starts the cache. Must be called once.

        Args:
            backend (IStorageProvider): The storage backend (e.g., InMemoryStorageProvider).
            policy_manager (ICachePolicyManager): Policy manager (required).
            scope_config (ScopeConfig): Scope configuration (defaults to DEFAULT_SCOPE_CONFIG).
        """
        if self._policy_manager is None:
            with self._instance_lock:
                if self._policy_manager is None:
                    self._storage = backend
                    self._scope_config = scope_config
                    self._policy_manager = policy_manager
                    self._policy_manager.start_background_cleanup()
                else:
                    logging.warning("Cache has already been configured.")
        else:
            logging.warning("Cache has already been configured.")

    def _get_all_keys_from_storage(self) -> List[_CacheKey]:
        """
        (Hook) Returns all keys from the injected storage provider.

        Returns:
            List[_CacheKey]: A copy of the current cache keys.
        """
        if self._storage:
            return self._storage.get_all_keys()
        return []

    def _get_value_no_lock_from_storage(self, key: _CacheKey) -> Optional[_CacheValue]:
        """
        (Hook) Performs a non-locking read from storage.

        Args:
            key (_CacheKey): The internal key to look up.

        Returns:
            Optional[_CacheValue]: The stored tuple (value, expiry) or None.
        """
        if self._storage:
            return self._storage.get_value_no_lock(key)
        return None

    def _internal_get(self, key: _CacheKey, namespace: str) -> Optional[Any]:
        """
        Orchestrates getting an item. Delegates storage, checks expiry, notifies policy.

        Args:
            key (_CacheKey): The internal key to get.
            namespace (str): The namespace of the key (for policy tracking).

        Returns:
            Optional[Any]: The cached value or None if not found/expired.
        """
        if not self._storage or not self._policy_manager:
            logging.error("Cache used before 'configure()' was called.")
            return None

        value_tuple = self._storage.get(key)

        if value_tuple is None:
            self._misses += 1
            return None

        value, expiry = value_tuple
        current_time = time.monotonic()

        if current_time > expiry:
            self._internal_evict(key, namespace, notify_policy=True)
            self._misses += 1
            return None

        self._hits += 1
        self._policy_manager.notify_get(key, namespace)
        return value

    def _internal_set(
        self, key: _CacheKey, value: Any, ttl_seconds: Union[int, float], namespace: str, max_items: Optional[int]
    ) -> None:
        """
        Orchestrates setting an item.
        Delegates storage, then notifies policy to check limits.

        Args:
            key (_CacheKey): The internal key to set.
            value (Any): The value to store.
            ttl_seconds (Union[int, float]): The time-to-live in seconds.
            namespace (str): The namespace of the key.
            max_items (Optional[int]): The max_items limit for this namespace.
        """
        if not self._storage or not self._policy_manager:
            logging.error("Cache used before 'configure()' was called.")
            return

        if ttl_seconds <= 0:
            return

        expiry = time.monotonic() + ttl_seconds
        self._storage.set(key, (value, expiry))

        key_to_evict = self._policy_manager.notify_set(key, namespace, max_items)
        if key_to_evict:
            evicted_ns = key_to_evict[1]
            self._internal_evict(key_to_evict, evicted_ns, notify_policy=True)

    def _internal_evict(self, key: _CacheKey, namespace: str, notify_policy: bool = True) -> None:
        """
        Orchestrates evicting an item.
        Delegates to storage, cleans up calculation locks, notifies policy.

        Args:
            key (_CacheKey): The internal key to evict.
            namespace (str): The namespace of the key.
            notify_policy (bool): Whether to notify the policy manager.
        """
        if not self._storage or not self._policy_manager:
            logging.error("Cache used before 'configure()' was called.")
            return

        self._storage.evict(key)
        self._evictions += 1

        if key in self._calculation_locks:
            del self._calculation_locks[key]

        if notify_policy:
            self._policy_manager.notify_evict(key, namespace)

    def _make_args_key(self, *args: Any, **kwargs: Any) -> Tuple[Any, ...]:
        """
        Creates a hashable key from function arguments using pickle.

        This method serializes all arguments, including complex objects
        like Pydantic models, dictionaries, and lists, into a stable
        byte representation, which is then wrapped in a tuple to conform
        to the _CacheKey structure.

        Args:
            *args: Positional arguments.
            **kwargs: Keyword arguments.

        Returns:
            Tuple[Any, ...]: A hashable tuple containing the serialized arguments.

        Raises:
            TypeError: If the arguments cannot be serialized by pickle,
                which is caught by the cache wrappers to skip caching.
        """
        try:
            key_representation = (args, tuple(sorted(kwargs.items())))
            serialized_key = pickle.dumps(key_representation, protocol=pickle.HIGHEST_PROTOCOL)
        except (pickle.PicklingError, TypeError) as e:
            logging.warning(f"Failed to serialize arguments for caching. Object may be unpickleable: {e}")
            raise TypeError(f"Unhashable (unpickleable) arguments: {e}")

        return (serialized_key,)

    def _get_scope_prefix(self, scope: _CacheScope, scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any) -> str:
        """
        Gets the tenancy prefix based on the scope using the configured scope hierarchy.

        Args:
            scope (_CacheScope): The scope level name or tuple of scope path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).

        Returns:
            str: The scope prefix path.
        """
        all_params = scope_params or {}
        all_params.update(kwargs)

        if not self._scope_config:
            raise RuntimeError("Cache not configured")

        if scope == "global":
            return "global"

        if isinstance(scope, str):
            self._scope_config.validate_scope_params(scope, all_params)
            return self._scope_config.build_scope_path(all_params)
        elif isinstance(scope, tuple):
            target_level = scope[-1]
            self._scope_config.validate_scope_params(target_level, all_params)
            return self._scope_config.build_scope_path(all_params)
        else:
            raise ValueError(f"Invalid scope type: {type(scope)}")

    def _make_cache_key(
        self, func_name: str, args_key: Tuple[Any, ...], scope: _CacheScope, func_kwargs: Dict[str, Any]
    ) -> _CacheKey:
        """
        Creates the final composite key for decorated functions.

        Args:
            func_name (str): The name of the decorated function.
            args_key (Tuple[Any, ...]): The hashable key from function args.
            scope (_CacheScope): The scope for this cache entry.
            func_kwargs (Dict[str, Any]): The kwargs passed to the function (to find scope params).

        Returns:
            _CacheKey: The final composite internal key.
        """
        prefix = self._get_scope_prefix(scope, scope_params=func_kwargs)
        return (prefix, func_name, args_key)

    def _make_programmatic_key(
        self, key: Any, scope: _CacheScope, scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> _CacheKey:
        """
        Creates the final composite key for programmatic access.

        Args:
            key (Any): The public key provided by the user.
            scope (_CacheScope): The scope for this cache entry.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).

        Returns:
            _CacheKey: The final composite internal key.
        """
        all_params = scope_params or {}
        all_params.update(kwargs)
        prefix = self._get_scope_prefix(scope, scope_params=all_params)
        namespace = "__programmatic__"
        return (prefix, namespace, (key,))

    def cache(
        self, ttl_seconds: Union[int, float], scope: _CacheScope = "global", max_items: Optional[int] = None
    ) -> Callable[[Callable[P, T]], Callable[P, T]]:
        """
        Decorator factory for caching function results with proper type preservation.

        Args:
            ttl_seconds (Union[int, float]): Time-to-live (in seconds) for cached items.
            scope (_CacheScope): The cache scope ('global', 'organization', 'user').
                If 'organization' or 'user', the decorated function MUST
                accept `organization_id` or `user_id` as a kwarg.
            max_items (Optional[int]): Max number of items to cache for this specific
                function.

        Returns:
            Callable: A decorator function that preserves the original function signature.
        """

        def _decorator(func: Callable[P, T]) -> Callable[P, T]:
            namespace = func.__name__

            wrapper = (
                self._create_async_wrapper(func, ttl_seconds, scope, namespace, max_items)  # type: ignore
                if asyncio.iscoroutinefunction(func)
                else self._create_sync_wrapper(func, ttl_seconds, scope, namespace, max_items)  # type: ignore
            )

            return wraps(func)(wrapper)  # type: ignore

        return _decorator

    def _create_sync_wrapper(
        self,
        func: Callable[P, T],
        ttl_seconds: Union[int, float],
        scope: _CacheScope,
        namespace: str,
        max_items: Optional[int],
    ) -> Callable[P, T]:
        """
        Creates sync wrapper with stampede protection and proper type preservation.

        Args:
            func (Callable): The synchronous function to wrap.
            ttl_seconds (Union[int, float]): The TTL for cache entries.
            scope (_CacheScope): The scope for this function.
            namespace (str): The namespace (function name) for policy tracking.
            max_items (Optional[int]): The max_items limit for this namespace.

        Returns:
            Callable: The wrapped synchronous function with preserved signature.
        """

        @wraps(func)
        def _sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            try:
                args_key = self._make_args_key(*args, **kwargs)
                key = self._make_cache_key(func.__name__, args_key, scope, kwargs)
            except TypeError as e:
                logging.warning(f"Unhashable arguments in {func.__name__}: {e}. Skipping cache.")
                return func(*args, **kwargs)
            except ValueError as e:
                logging.warning(f"Cache scope error for {func.__name__}: {e}. Skipping cache.")
                return func(*args, **kwargs)

            cached_value = self._internal_get(key, namespace)
            if cached_value is not None:
                return cached_value  # type: ignore

            calc_lock = self._calculation_locks[key]

            with calc_lock:
                try:
                    cached_value = self._internal_get(key, namespace)
                    if cached_value is not None:
                        return cached_value  # type: ignore

                    logging.info(f"Cache miss and calculation for key: {key}")
                    new_value = func(*args, **kwargs)

                    self._internal_set(key, new_value, ttl_seconds, namespace, max_items)
                    return new_value
                except Exception as e:
                    logging.error(f"Error in cache wrapper for {func.__name__}: {e}")
                    return func(*args, **kwargs)  # type: ignore

        return _sync_wrapper

    def _create_async_wrapper(
        self,
        func: Callable[P, T],
        ttl_seconds: Union[int, float],
        scope: _CacheScope,
        namespace: str,
        max_items: Optional[int],
    ) -> Callable[P, T]:
        """
        Creates async wrapper with stampede protection and proper type preservation.

        Args:
            func (Callable): The asynchronous function to wrap.
            ttl_seconds (Union[int, float]): The TTL for cache entries.
            scope (_CacheScope): The scope for this function.
            namespace (str): The namespace (function name) for policy tracking.
            max_items (Optional[int]): The max_items limit for this namespace.

        Returns:
            Callable: The wrapped asynchronous function with preserved signature.
        """

        @wraps(func)
        async def _async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            try:
                args_key = self._make_args_key(*args, **kwargs)
                key = self._make_cache_key(func.__name__, args_key, scope, kwargs)
            except TypeError as e:
                logging.warning(f"Unhashable arguments in {func.__name__}: {e}. Skipping cache.")
                return await func(*args, **kwargs)  # type: ignore[misc,no-any-return]
            except ValueError as e:
                logging.warning(f"Cache scope error for {func.__name__}: {e}. Skipping cache.")
                return await func(*args, **kwargs)  # type: ignore[misc,no-any-return]

            cached_value = await asyncio.to_thread(self._internal_get, key, namespace)
            if cached_value is not None:
                return cached_value  # type: ignore

            calc_lock = self._calculation_locks[key]

            await asyncio.to_thread(calc_lock.acquire)
            try:
                try:
                    cached_value = await asyncio.to_thread(self._internal_get, key, namespace)
                    if cached_value is not None:
                        return cached_value  # type: ignore

                    logging.info(f"Cache miss and calculation for key: {key}")
                    new_value = await func(*args, **kwargs)  # type: ignore

                    await asyncio.to_thread(self._internal_set, key, new_value, ttl_seconds, namespace, max_items)
                    return new_value  # type: ignore[no-any-return]
                except Exception as e:
                    logging.error(f"Error in async cache wrapper for {func.__name__}: {e}")
                    return await func(*args, **kwargs)  # type: ignore[misc,no-any-return]
            finally:
                calc_lock.release()

        return _async_wrapper  # type: ignore

    def get(
        self, key: Any, scope: _CacheScope = "global", scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> Optional[Any]:
        """
        Gets an item programmatically from the cache.

        Args:
            key (Any): The key to look up (must be hashable).
            scope (_CacheScope): The scope level or path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).

        Returns:
            Optional[Any]: The cached value or None if not found or expired.
        """
        try:
            cache_key = self._make_programmatic_key(key, scope, scope_params=scope_params, **kwargs)
            namespace = cache_key[1]
            return self._internal_get(cache_key, namespace)
        except Exception as e:
            logging.error(f"Error in cache get operation: {e}")
            return None

    def set(
        self,
        key: Any,
        value: Any,
        ttl_seconds: Union[int, float],
        scope: _CacheScope = "global",
        scope_params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """
        Sets an item programmatically in the cache.

        Args:
            key (Any): The key (must be hashable).
            value (Any): The value to store.
            ttl_seconds (Union[int, float]): Time-to-live in seconds.
            scope (_CacheScope): The scope level or path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).
        """
        try:
            cache_key = self._make_programmatic_key(key, scope, scope_params=scope_params, **kwargs)
            namespace = cache_key[1]
            self._internal_set(cache_key, value, ttl_seconds, namespace, max_items=None)
        except Exception as e:
            logging.error(f"Error in cache set operation: {e}")
            raise

    def evict(
        self, key: Any, scope: _CacheScope = "global", scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> None:
        """
        Removes a specific item programmatically from the cache.

        Args:
            key (Any): The key to remove (must be hashable).
            scope (_CacheScope): The scope level or path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).
        """
        try:
            cache_key = self._make_programmatic_key(key, scope, scope_params=scope_params, **kwargs)
            namespace = cache_key[1]
            self._internal_evict(cache_key, namespace, notify_policy=True)
        except Exception as e:
            logging.error(f"Error in cache evict operation: {e}")

    def clear(self) -> None:
        """
        Safely clears the entire cache (storage and policy).
        """
        try:
            with self._instance_lock:
                if self._storage:
                    self._storage.clear()

                if self._policy_manager:
                    self._policy_manager.notify_clear()

                self._calculation_locks.clear()
                self._hits = 0
                self._misses = 0
                self._evictions = 0

                logging.warning("Cache has been cleared.")
        except Exception as e:
            logging.error(f"Error clearing cache: {e}")
            raise

    async def aget(
        self, key: Any, scope: _CacheScope = "global", scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> Optional[Any]:
        """
        Asynchronously gets an item programmatically from the cache.
        (Runs the synchronous 'get' in a separate thread).

        Args:
            key (Any): The key to look up (must be hashable).
            scope (_CacheScope): The scope level or path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).

        Returns:
            Optional[Any]: The cached value or None.
        """
        return await asyncio.to_thread(self.get, key, scope=scope, scope_params=scope_params, **kwargs)

    async def aset(
        self,
        key: Any,
        value: Any,
        ttl_seconds: Union[int, float],
        scope: _CacheScope = "global",
        scope_params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """
        Asynchronously sets an item programmatically in the cache.
        (Runs the synchronous 'set' in a separate thread).

        Args:
            key (Any): The key (must be hashable).
            value (Any): The value to store.
            ttl_seconds (Union[int, float]): Time-to-live in seconds.
            scope (_CacheScope): The scope level or path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).
        """
        await asyncio.to_thread(self.set, key, value, ttl_seconds, scope=scope, scope_params=scope_params, **kwargs)

    async def aevict(
        self, key: Any, scope: _CacheScope = "global", scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> None:
        """
        Asynchronously removes a specific item programmatically.
        (Runs the synchronous 'evict' in a separate thread).

        Args:
            key (Any): The key to remove (must be hashable).
            scope (_CacheScope): The scope level or path.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).
        """
        await asyncio.to_thread(self.evict, key, scope=scope, scope_params=scope_params, **kwargs)

    async def aclear(self) -> None:
        """
        Asynchronously clears the entire cache.
        (Runs the synchronous 'clear' in a separate thread).
        """
        await asyncio.to_thread(self.clear)

    def stats(self) -> Dict[str, Any]:
        """
        Returns a dictionary of cache observability statistics.

        Returns:
            Dict[str, Any]: A dict containing keys like 'hits', 'misses',
                'evictions', 'current_size', etc.
        """
        with self._instance_lock:
            g_size = 0
            ns_count = 0
            if self._policy_manager:
                g_size = self._policy_manager.get_global_size()
                ns_count = self._policy_manager.get_namespace_count()

            return {
                "hits": self._hits,
                "misses": self._misses,
                "evictions": self._evictions,
                "current_size": g_size,
                "tracked_namespaces": ns_count,
                "total_calc_locks": len(self._calculation_locks),
            }

    def evict_by_scope(self, scope: _CacheScope, scope_params: Optional[Dict[str, Any]] = None, **kwargs: Any) -> int:
        """
        Granularly evicts all items belonging to a specific scope.

        Example:
            evict_by_scope("organization", scope_params={"organization_id": "org_123"})

        Args:
            scope (_CacheScope): The scope to target.
            scope_params (Optional[Dict[str, Any]]): Parameters for scope construction.
            **kwargs: Additional parameters (for backward compatibility).

        Returns:
            int: The number of items successfully evicted.
        """
        if not self._storage or not self._policy_manager or not self._scope_config:
            logging.error("Cache used before 'configure()' was called.")
            return 0

        try:
            all_params = scope_params or {}
            all_params.update(kwargs)
            prefix = self._get_scope_prefix(scope, scope_params=all_params)
        except ValueError as e:
            logging.error(f"Failed to evict by scope: {e}")
            return 0

        all_keys = self._storage.get_all_keys()
        evicted_count = 0

        for key in all_keys:
            if key[0] == prefix or (self._scope_config and self._scope_config.is_descendant_of(key[0], prefix)):
                namespace = key[1]
                self._internal_evict(key, namespace, notify_policy=True)
                evicted_count += 1

        if evicted_count > 0:
            logging.warning(f"Evicted {evicted_count} items for scope: {prefix}")

        return evicted_count


def create_cache(
    backend: IStorageProvider, policy_manager: ICachePolicyManager, scope_config: Optional[ScopeConfig] = None
) -> Cache:
    """
    Factory function to create and configure a Cache instance.

    This replaces the Singleton pattern with explicit dependency injection.

    Args:
        backend: Storage backend (required)
        policy_manager: Policy manager (required)
        scope_config: Scope configuration

    Returns:
        Configured Cache instance
    """
    cache = Cache()

    if hasattr(policy_manager, "_cache") and policy_manager._cache is None:
        policy_manager._cache = cache

    cache.configure(backend=backend, policy_manager=policy_manager, scope_config=scope_config or ScopeConfig())
    return cache
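
For orientation, here is a minimal usage sketch of the API added above. Only create_cache, Cache.cache, and Cache.stats come from this diff; the class names InMemoryStorageProvider and CachePolicyManager, and their no-argument constructors, are assumptions about storages/in_memory.py and cache_policies/cache_policy_manager.py, which are not shown here.

    from cache import create_cache
    from storages.in_memory import InMemoryStorageProvider  # assumed class name
    from cache_policies.cache_policy_manager import CachePolicyManager  # assumed class name

    # Explicit dependency injection instead of a singleton.
    cache = create_cache(backend=InMemoryStorageProvider(), policy_manager=CachePolicyManager())

    # Decorator usage: results are cached per argument set for 60 seconds,
    # with at most 100 entries tracked for this function's namespace.
    @cache.cache(ttl_seconds=60, max_items=100)
    def expensive_lookup(item_id: int) -> dict:
        return {"id": item_id}

    expensive_lookup(1)   # computed and stored
    expensive_lookup(1)   # served from the cache until the TTL expires
    print(cache.stats())  # hits / misses / evictions counters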
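
Continuing the sketch, the programmatic methods and scope-wide eviction behave as below. Whether the default ScopeConfig() actually defines an "organization" level keyed by organization_id is determined by cache_scopes/scope_config.py, which this diff does not show, so the scoped calls are illustrative only.

    # Programmatic access in the default global scope.
    cache.set("greeting", "hello", ttl_seconds=30)
    value = cache.get("greeting")  # "hello" until the TTL expires
    cache.evict("greeting")

    # Scoped access, assuming the configured ScopeConfig defines an
    # "organization" level keyed by organization_id.
    cache.set("plan", "pro", ttl_seconds=300, scope="organization",
              scope_params={"organization_id": "org_123"})
    cache.evict_by_scope("organization", scope_params={"organization_id": "org_123"})

    # Async variants delegate to the sync methods via asyncio.to_thread, e.g.:
    #   await cache.aget("greeting")
    #   await cache.aset("greeting", "hello", ttl_seconds=30)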