cachu 0.2.3-py3-none-any.whl → 0.2.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachu/__init__.py +8 -12
- cachu/backends/__init__.py +65 -5
- cachu/backends/memory.py +159 -57
- cachu/backends/redis.py +160 -28
- cachu/backends/sqlite.py +326 -41
- cachu/config.py +6 -6
- cachu/decorator.py +354 -87
- cachu/keys.py +8 -0
- cachu/mutex.py +247 -0
- cachu/operations.py +171 -23
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/METADATA +7 -10
- cachu-0.2.5.dist-info/RECORD +15 -0
- cachu/async_decorator.py +0 -262
- cachu/async_operations.py +0 -178
- cachu/backends/async_base.py +0 -50
- cachu/backends/async_memory.py +0 -111
- cachu/backends/async_redis.py +0 -141
- cachu/backends/async_sqlite.py +0 -256
- cachu/backends/file.py +0 -10
- cachu-0.2.3.dist-info/RECORD +0 -21
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/WHEEL +0 -0
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/top_level.txt +0 -0
cachu/decorator.py
CHANGED
```diff
@@ -1,42 +1,54 @@
-"""Cache decorator implementation.
+"""Cache decorator implementation with unified sync and async support.
 """
+import asyncio
 import logging
 import os
 import threading
 import time
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from functools import wraps
 from typing import Any
 
 from .backends import NO_VALUE, Backend
-from .backends.file import FileBackend
 from .backends.memory import MemoryBackend
+from .backends.sqlite import SqliteBackend
 from .config import _get_caller_package, get_config, is_disabled
 from .keys import make_key_generator, mangle_key
 from .types import CacheEntry, CacheInfo, CacheMeta
 
 logger = logging.getLogger(__name__)
 
-_backends: dict[tuple[str | None, str, int], Backend] = {}
-_backends_lock = threading.Lock()
 
-
-
-
-
-def _get_backend(package: str | None, backend_type: str, ttl: int) -> Backend:
-    """Get or create a backend instance.
+class CacheManager:
+    """Unified manager for cache backends and statistics.
     """
-    key = (package, backend_type, ttl)
-
-    with _backends_lock:
-        if key in _backends:
-            return _backends[key]
 
+    def __init__(self) -> None:
+        self.backends: dict[tuple[str | None, str, int], Backend] = {}
+        self.stats: dict[int, tuple[int, int]] = {}
+        self._sync_lock = threading.Lock()
+        self._async_lock: asyncio.Lock | None = None
+        self._stats_lock = threading.Lock()
+
+    def _get_async_lock(self) -> asyncio.Lock:
+        """Lazy-create async lock (must be called from async context).
+        """
+        if self._async_lock is None:
+            self._async_lock = asyncio.Lock()
+        return self._async_lock
+
+    def _create_backend(
+        self,
+        package: str | None,
+        backend_type: str,
+        ttl: int,
+    ) -> Backend:
+        """Create a backend instance (called with lock held).
+        """
         cfg = get_config(package)
 
         if backend_type == 'memory':
-            backend = MemoryBackend()
+            backend: Backend = MemoryBackend()
         elif backend_type == 'file':
             if ttl < 60:
                 filename = f'cache{ttl}sec.db'
@@ -49,19 +61,100 @@ def _get_backend(package: str | None, backend_type: str, ttl: int) -> Backend:
                 filename = f'{package}_{filename}'
 
             filepath = os.path.join(cfg.file_dir, filename)
-            backend =
+            backend = SqliteBackend(filepath)
         elif backend_type == 'redis':
             from .backends.redis import RedisBackend
-            backend = RedisBackend(cfg.redis_url, cfg.
+            backend = RedisBackend(cfg.redis_url, cfg.lock_timeout)
         else:
             raise ValueError(f'Unknown backend type: {backend_type}')
 
-        _backends[key] = backend
         logger.debug(f"Created {backend_type} backend for package '{package}', {ttl}s TTL")
         return backend
 
+    def get_backend(self, package: str | None, backend_type: str, ttl: int) -> Backend:
+        """Get or create a backend instance (sync).
+        """
+        key = (package, backend_type, ttl)
+        with self._sync_lock:
+            if key not in self.backends:
+                self.backends[key] = self._create_backend(package, backend_type, ttl)
+            return self.backends[key]
+
+    async def aget_backend(
+        self,
+        package: str | None,
+        backend_type: str,
+        ttl: int,
+    ) -> Backend:
+        """Get or create a backend instance (async).
+        """
+        key = (package, backend_type, ttl)
+        async with self._get_async_lock():
+            if key not in self.backends:
+                self.backends[key] = self._create_backend(package, backend_type, ttl)
+            return self.backends[key]
+
+    def record_hit(self, fn: Callable[..., Any]) -> None:
+        """Record a cache hit for the function.
+        """
+        fn_id = id(fn)
+        with self._stats_lock:
+            hits, misses = self.stats.get(fn_id, (0, 0))
+            self.stats[fn_id] = (hits + 1, misses)
+
+    def record_miss(self, fn: Callable[..., Any]) -> None:
+        """Record a cache miss for the function.
+        """
+        fn_id = id(fn)
+        with self._stats_lock:
+            hits, misses = self.stats.get(fn_id, (0, 0))
+            self.stats[fn_id] = (hits, misses + 1)
+
+    def get_stats(self, fn: Callable[..., Any]) -> tuple[int, int]:
+        """Get (hits, misses) for a function.
+        """
+        fn_id = id(fn)
+        with self._stats_lock:
+            return self.stats.get(fn_id, (0, 0))
+
+    def clear(self, package: str | None = None) -> None:
+        """Clear backend instances (sync).
+        """
+        with self._sync_lock:
+            if package is None:
+                for backend in self.backends.values():
+                    backend.close()
+                self.backends.clear()
+            else:
+                keys_to_delete = [k for k in self.backends if k[0] == package]
+                for key in keys_to_delete:
+                    self.backends[key].close()
+                    del self.backends[key]
+
+    async def aclear(self, package: str | None = None) -> None:
+        """Clear backend instances (async).
+        """
+        async with self._get_async_lock():
+            if package is None:
+                for backend in self.backends.values():
+                    await backend.aclose()
+                self.backends.clear()
+            else:
+                keys_to_delete = [k for k in self.backends if k[0] == package]
+                for key in keys_to_delete:
+                    await self.backends[key].aclose()
+                    del self.backends[key]
+
 
-
+manager = CacheManager()
+
+
+def get_backend(
+    backend_type: str | None = None,
+    package: str | None = None,
+    *,
+    ttl: int,
+) -> Backend:
     """Get a backend instance.
 
     Args:
@@ -76,7 +169,86 @@ def get_backend(backend_type: str | None = None, package: str | None = None, *,
         cfg = get_config(package)
         backend_type = cfg.backend
 
-    return
+    return manager.get_backend(package, backend_type, ttl)
+
+
+async def aget_backend(
+    backend_type: str | None = None,
+    package: str | None = None,
+    *,
+    ttl: int,
+) -> Backend:
+    """Get a backend instance (async).
+
+    Args:
+        backend_type: 'memory', 'file', or 'redis'. Uses config default if None.
+        package: Package name. Auto-detected if None.
+        ttl: TTL in seconds (used for backend separation).
+    """
+    if package is None:
+        package = _get_caller_package()
+
+    if backend_type is None:
+        cfg = get_config(package)
+        backend_type = cfg.backend
+
+    return await manager.aget_backend(package, backend_type, ttl)
+
+
+def _validate_entry(
+    value: Any,
+    created_at: float | None,
+    validate: Callable[[CacheEntry], bool] | None,
+) -> bool:
+    """Validate a cached entry using the validate callback.
+    """
+    if validate is None or created_at is None:
+        return True
+
+    entry = CacheEntry(
+        value=value,
+        created_at=created_at,
+        age=time.time() - created_at,
+    )
+    return validate(entry)
+
+
+def _attach_helpers(
+    wrapper: Callable[..., Any],
+    key_generator: Callable[..., str],
+    resolved_package: str | None,
+    resolved_backend: str,
+    ttl: int,
+    is_async: bool,
+) -> None:
+    """Attach .invalidate() and .refresh() methods to wrapper.
+    """
+    if is_async:
+        async def invalidate(**kwargs: Any) -> None:
+            backend = await manager.aget_backend(resolved_package, resolved_backend, ttl)
+            cfg = get_config(resolved_package)
+            cache_key = mangle_key(key_generator(**kwargs), cfg.key_prefix, ttl)
+            await backend.adelete(cache_key)
+
+        async def refresh(**kwargs: Any) -> Any:
+            await invalidate(**kwargs)
+            return await wrapper(**kwargs)
+
+        wrapper.invalidate = invalidate  # type: ignore
+        wrapper.refresh = refresh  # type: ignore
+    else:
+        def invalidate(**kwargs: Any) -> None:
+            backend = manager.get_backend(resolved_package, resolved_backend, ttl)
+            cfg = get_config(resolved_package)
+            cache_key = mangle_key(key_generator(**kwargs), cfg.key_prefix, ttl)
+            backend.delete(cache_key)
+
+        def refresh(**kwargs: Any) -> Any:
+            invalidate(**kwargs)
+            return wrapper(**kwargs)
+
+        wrapper.invalidate = invalidate  # type: ignore
+        wrapper.refresh = refresh  # type: ignore
 
 
 def cache(
@@ -88,7 +260,10 @@ def cache(
     validate: Callable[[CacheEntry], bool] | None = None,
     package: str | None = None,
 ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
-    """
+    """Universal cache decorator for sync and async functions.
+
+    Automatically detects async functions and uses appropriate code path.
+    Includes dogpile prevention using per-key mutexes.
 
     Args:
         ttl: Time-to-live in seconds (default: 300)
@@ -110,6 +285,10 @@ def cache(
         def get_user(user_id: int) -> dict:
             return fetch_user(user_id)
 
+        @cache(ttl=300, tag='users')
+        async def get_user_async(user_id: int) -> dict:
+            return await fetch_user(user_id)
+
         # Normal call
         user = get_user(123)
 
@@ -118,6 +297,12 @@ def cache(
 
         # Force refresh
        user = get_user(123, _overwrite_cache=True)
+
+        # Invalidate specific entry
+        get_user.invalidate(user_id=123)
+
+        # Refresh specific entry
+        user = get_user.refresh(user_id=123)
     """
     resolved_package = package if package is not None else _get_caller_package()
 
@@ -129,6 +314,7 @@ def cache(
 
     def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
         key_generator = make_key_generator(fn, tag, exclude)
+        is_async = asyncio.iscoroutinefunction(fn)
 
         meta = CacheMeta(
             ttl=ttl,
@@ -141,74 +327,129 @@ def cache(
             key_generator=key_generator,
         )
 
-
-
-
-
+        if is_async:
+            @wraps(fn)
+            async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+                skip_cache = kwargs.pop('_skip_cache', False)
+                overwrite_cache = kwargs.pop('_overwrite_cache', False)
 
-
-
+                if is_disabled() or skip_cache:
+                    return await fn(*args, **kwargs)
 
-
-
+                backend_inst = await manager.aget_backend(
+                    resolved_package,
+                    resolved_backend,
+                    ttl,
+                )
+                cfg = get_config(resolved_package)
 
-
-
-
-
-
-
-
-
-                entry = CacheEntry(
-                    value=value,
-                    created_at=created_at,
-                    age=time.time() - created_at,
-                )
-                if not validate(entry):
-                    logger.debug(f'Cache validation failed for {fn.__name__}')
-                else:
-                    _record_hit(wrapper)
-                    return value
-            else:
-                _record_hit(wrapper)
+                base_key = key_generator(*args, **kwargs)
+                cache_key = mangle_key(base_key, cfg.key_prefix, ttl)
+
+                if not overwrite_cache:
+                    value, created_at = await backend_inst.aget_with_metadata(cache_key)
+
+                    if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                        manager.record_hit(async_wrapper)
                         return value
 
-
-
+                mutex = backend_inst.get_async_mutex(cache_key)
+                acquired = await mutex.acquire(timeout=cfg.lock_timeout)
+                try:
+                    if not overwrite_cache:
+                        value, created_at = await backend_inst.aget_with_metadata(cache_key)
+                        if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                            manager.record_hit(async_wrapper)
+                            return value
 
-
+                    manager.record_miss(async_wrapper)
+                    result = await fn(*args, **kwargs)
 
-
-
-
+                    if cache_if is None or cache_if(result):
+                        await backend_inst.aset(cache_key, result, ttl)
+                        logger.debug(f'Cached {fn.__name__} with key {cache_key}')
 
-
+                    return result
+                finally:
+                    if acquired:
+                        await mutex.release()
 
-
-
+            async_wrapper._cache_meta = meta  # type: ignore
+            async_wrapper._cache_key_generator = key_generator  # type: ignore
+            _attach_helpers(async_wrapper, key_generator, resolved_package, resolved_backend, ttl, is_async=True)
+            return async_wrapper
 
-
+        else:
+            @wraps(fn)
+            def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+                skip_cache = kwargs.pop('_skip_cache', False)
+                overwrite_cache = kwargs.pop('_overwrite_cache', False)
 
-
+                if is_disabled() or skip_cache:
+                    return fn(*args, **kwargs)
 
+                backend_inst = manager.get_backend(resolved_package, resolved_backend, ttl)
+                cfg = get_config(resolved_package)
 
-
-
-
-
-
-
-
+                base_key = key_generator(*args, **kwargs)
+                cache_key = mangle_key(base_key, cfg.key_prefix, ttl)
+
+                if not overwrite_cache:
+                    value, created_at = backend_inst.get_with_metadata(cache_key)
+
+                    if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                        manager.record_hit(sync_wrapper)
+                        return value
+
+                mutex = backend_inst.get_mutex(cache_key)
+                acquired = mutex.acquire(timeout=cfg.lock_timeout)
+                try:
+                    if not overwrite_cache:
+                        value, created_at = backend_inst.get_with_metadata(cache_key)
+                        if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                            manager.record_hit(sync_wrapper)
+                            return value
+
+                    manager.record_miss(sync_wrapper)
+                    result = fn(*args, **kwargs)
+
+                    if cache_if is None or cache_if(result):
+                        backend_inst.set(cache_key, result, ttl)
+                        logger.debug(f'Cached {fn.__name__} with key {cache_key}')
+
+                    return result
+                finally:
+                    if acquired:
+                        mutex.release()
+
+            sync_wrapper._cache_meta = meta  # type: ignore
+            sync_wrapper._cache_key_generator = key_generator  # type: ignore
+            _attach_helpers(sync_wrapper, key_generator, resolved_package, resolved_backend, ttl, is_async=False)
+            return sync_wrapper
+
+    return decorator
 
 
-def
-
+def async_cache(
+    ttl: int = 300,
+    backend: str | None = None,
+    tag: str = '',
+    exclude: set[str] | None = None,
+    cache_if: Callable[[Any], bool] | None = None,
+    validate: Callable[[CacheEntry], bool] | None = None,
+    package: str | None = None,
+) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
+    """Deprecated: Use @cache instead (auto-detects async).
     """
-
-
-
-
+    return cache(
+        ttl=ttl,
+        backend=backend,
+        tag=tag,
+        exclude=exclude,
+        cache_if=cache_if,
+        validate=validate,
+        package=package,
+    )
 
 
 def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
@@ -220,16 +461,13 @@ def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
     Returns
         CacheInfo with hits, misses, and currsize
     """
-
-
-    with _stats_lock:
-        hits, misses = _stats.get(fn_id, (0, 0))
+    hits, misses = manager.get_stats(fn)
 
     meta = getattr(fn, '_cache_meta', None)
     if meta is None:
         return CacheInfo(hits=hits, misses=misses, currsize=0)
 
-    backend_instance =
+    backend_instance = manager.get_backend(meta.package, meta.backend, meta.ttl)
     cfg = get_config(meta.package)
 
     fn_name = getattr(fn, '__wrapped__', fn).__name__
@@ -240,13 +478,42 @@ def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
     return CacheInfo(hits=hits, misses=misses, currsize=currsize)
 
 
+async def get_async_cache_info(fn: Callable[..., Any]) -> CacheInfo:
+    """Get cache statistics for an async decorated function.
+
+    Args:
+        fn: A function decorated with @cache
+
+    Returns
+        CacheInfo with hits, misses, and currsize
+    """
+    hits, misses = manager.get_stats(fn)
+
+    meta = getattr(fn, '_cache_meta', None)
+    if meta is None:
+        return CacheInfo(hits=hits, misses=misses, currsize=0)
+
+    backend_instance = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+    cfg = get_config(meta.package)
+
+    fn_name = getattr(fn, '__wrapped__', fn).__name__
+    pattern = f'*:{cfg.key_prefix}{fn_name}|*'
+
+    currsize = await backend_instance.acount(pattern)
+
+    return CacheInfo(hits=hits, misses=misses, currsize=currsize)
+
+
 def clear_backends(package: str | None = None) -> None:
     """Clear all backend instances for a package. Primarily for testing.
     """
-
-
-
-
-
-
-
+    manager.clear(package)
+
+
+async def clear_async_backends(package: str | None = None) -> None:
+    """Clear all async backend instances for a package. Primarily for testing.
+    """
+    await manager.aclear(package)
+
+
+get_async_backend = aget_backend
```
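The diff above replaces the separate sync and async decorators from 0.2.3 with a single `@cache` decorator that detects coroutine functions, plus `invalidate()` and `refresh()` helpers attached to each wrapper. The sketch below, assembled from the docstring examples in the diff, shows how the 0.2.5 API might be exercised; `load_user`, `load_user_async`, and the `cache_if`/`validate` callback bodies are illustrative assumptions rather than code from the package, and it assumes `cache` is re-exported from the top-level `cachu` namespace.

```python
# Usage sketch for the unified decorator shown in the diff above.
# load_user / load_user_async and the callback bodies are hypothetical;
# only @cache, _overwrite_cache, .invalidate(), and .refresh() appear in the diff.
import asyncio

from cachu import cache  # assumed top-level re-export

@cache(
    ttl=300,
    tag='users',
    cache_if=lambda result: result is not None,   # only cache non-empty results
    validate=lambda entry: entry.age < 60,        # treat entries older than 60s as stale
)
def load_user(user_id: int) -> dict | None:
    return {'id': user_id}          # stand-in for a real lookup

@cache(ttl=300, tag='users')
async def load_user_async(user_id: int) -> dict:
    return {'id': user_id}          # same decorator, async code path

user = load_user(user_id=123)                         # miss: computed, then cached
user = load_user(user_id=123)                         # hit: served from the backend
user = load_user(user_id=123, _overwrite_cache=True)  # force recompute and re-cache
load_user.invalidate(user_id=123)                     # drop this entry
user = load_user.refresh(user_id=123)                 # invalidate, then recompute

asyncio.run(load_user_async(user_id=123))
```

Because `CacheManager` keys backend instances by `(package, backend_type, ttl)`, decorated functions that share the same package, backend type, and TTL also share one backend instance.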
cachu/keys.py
CHANGED
```diff
@@ -35,6 +35,14 @@ def _normalize_tag(tag: str) -> str:
     return f'|{tag}|'
 
 
+def _tag_to_pattern(tag: str | None) -> str | None:
+    """Convert tag to cache key pattern for clearing.
+    """
+    if not tag:
+        return None
+    return f'*{_normalize_tag(tag)}*'
+
+
 def make_key_generator(
     fn: Callable[..., Any],
     tag: str = '',
```
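A small sketch of what the new `_tag_to_pattern` helper is expected to return, assuming `_normalize_tag` simply wraps a plain tag in pipes as its visible `return f'|{tag}|'` suggests (any further normalization is outside this hunk):

```python
# Inferred behaviour of the new helper; values follow from the visible source,
# not from the package's test suite.
from cachu.keys import _tag_to_pattern

assert _tag_to_pattern('users') == '*|users|*'  # glob pattern matching any key carrying the tag
assert _tag_to_pattern('') is None              # empty tag: nothing to clear by pattern
assert _tag_to_pattern(None) is None            # missing tag: same
```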