cachu-0.2.4-py3-none-any.whl → cachu-0.2.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachu/__init__.py +4 -5
- cachu/backends/__init__.py +38 -21
- cachu/backends/memory.py +137 -135
- cachu/backends/redis.py +86 -65
- cachu/backends/sqlite.py +163 -123
- cachu/config.py +6 -0
- cachu/decorator.py +257 -275
- cachu/mutex.py +247 -0
- cachu/operations.py +27 -28
- {cachu-0.2.4.dist-info → cachu-0.2.5.dist-info}/METADATA +1 -2
- cachu-0.2.5.dist-info/RECORD +15 -0
- cachu/async_decorator.py +0 -262
- cachu/async_operations.py +0 -178
- cachu/backends/async_base.py +0 -50
- cachu/backends/async_memory.py +0 -111
- cachu/backends/async_redis.py +0 -141
- cachu/backends/async_sqlite.py +0 -256
- cachu/backends/file.py +0 -10
- cachu-0.2.4.dist-info/RECORD +0 -21
- {cachu-0.2.4.dist-info → cachu-0.2.5.dist-info}/WHEEL +0 -0
- {cachu-0.2.4.dist-info → cachu-0.2.5.dist-info}/top_level.txt +0 -0
cachu/decorator.py
CHANGED
@@ -1,4 +1,4 @@
-"""Cache decorator implementation with sync and async support.
+"""Cache decorator implementation with unified sync and async support.
 """
 import asyncio
 import logging
@@ -9,9 +9,9 @@ from collections.abc import Awaitable, Callable
 from functools import wraps
 from typing import Any

-from .backends import NO_VALUE,
-from .backends.
-from .backends.
+from .backends import NO_VALUE, Backend
+from .backends.memory import MemoryBackend
+from .backends.sqlite import SqliteBackend
 from .config import _get_caller_package, get_config, is_disabled
 from .keys import make_key_generator, mangle_key
 from .types import CacheEntry, CacheInfo, CacheMeta
@@ -20,50 +20,79 @@ logger = logging.getLogger(__name__)


 class CacheManager:
-    """
+    """Unified manager for cache backends and statistics.
     """

     def __init__(self) -> None:
         self.backends: dict[tuple[str | None, str, int], Backend] = {}
         self.stats: dict[int, tuple[int, int]] = {}
-        self.
+        self._sync_lock = threading.Lock()
+        self._async_lock: asyncio.Lock | None = None
         self._stats_lock = threading.Lock()

-    def
-    """
+    def _get_async_lock(self) -> asyncio.Lock:
+        """Lazy-create async lock (must be called from async context).
         """
-
+        if self._async_lock is None:
+            self._async_lock = asyncio.Lock()
+        return self._async_lock
+
+    def _create_backend(
+        self,
+        package: str | None,
+        backend_type: str,
+        ttl: int,
+    ) -> Backend:
+        """Create a backend instance (called with lock held).
+        """
+        cfg = get_config(package)

-
-
-
-
-
-
-
-            backend = MemoryBackend()
-        elif backend_type == 'file':
-            if ttl < 60:
-                filename = f'cache{ttl}sec.db'
-            elif ttl < 3600:
-                filename = f'cache{ttl // 60}min.db'
-            else:
-                filename = f'cache{ttl // 3600}hour.db'
-
-            if package:
-                filename = f'{package}_{filename}'
-
-            filepath = os.path.join(cfg.file_dir, filename)
-            backend = FileBackend(filepath)
-        elif backend_type == 'redis':
-            from .backends.redis import RedisBackend
-            backend = RedisBackend(cfg.redis_url)
+        if backend_type == 'memory':
+            backend: Backend = MemoryBackend()
+        elif backend_type == 'file':
+            if ttl < 60:
+                filename = f'cache{ttl}sec.db'
+            elif ttl < 3600:
+                filename = f'cache{ttl // 60}min.db'
             else:
-
+                filename = f'cache{ttl // 3600}hour.db'
+
+            if package:
+                filename = f'{package}_{filename}'

-
-
-
+            filepath = os.path.join(cfg.file_dir, filename)
+            backend = SqliteBackend(filepath)
+        elif backend_type == 'redis':
+            from .backends.redis import RedisBackend
+            backend = RedisBackend(cfg.redis_url, cfg.lock_timeout)
+        else:
+            raise ValueError(f'Unknown backend type: {backend_type}')
+
+        logger.debug(f"Created {backend_type} backend for package '{package}', {ttl}s TTL")
+        return backend
+
+    def get_backend(self, package: str | None, backend_type: str, ttl: int) -> Backend:
+        """Get or create a backend instance (sync).
+        """
+        key = (package, backend_type, ttl)
+        with self._sync_lock:
+            if key not in self.backends:
+                self.backends[key] = self._create_backend(package, backend_type, ttl)
+            return self.backends[key]
+
+    async def aget_backend(
+        self,
+        package: str | None,
+        backend_type: str,
+        ttl: int,
+    ) -> Backend:
+        """Get or create a backend instance (async).
+        """
+        key = (package, backend_type, ttl)
+        async with self._get_async_lock():
+            if key not in self.backends:
+                self.backends[key] = self._create_backend(package, backend_type, ttl)
+            return self.backends[key]

     def record_hit(self, fn: Callable[..., Any]) -> None:
         """Record a cache hit for the function.
@@ -89,110 +118,35 @@ class CacheManager:
             return self.stats.get(fn_id, (0, 0))

     def clear(self, package: str | None = None) -> None:
-        """Clear backend instances
+        """Clear backend instances (sync).
         """
-        with self.
+        with self._sync_lock:
             if package is None:
+                for backend in self.backends.values():
+                    backend.close()
                 self.backends.clear()
             else:
                 keys_to_delete = [k for k in self.backends if k[0] == package]
                 for key in keys_to_delete:
+                    self.backends[key].close()
                     del self.backends[key]

-
-
-    """Manages async cache backends and statistics.
-    """
-
-    def __init__(self) -> None:
-        self.backends: dict[tuple[str | None, str, int], AsyncBackend] = {}
-        self.stats: dict[int, tuple[int, int]] = {}
-        self._backends_lock = asyncio.Lock()
-        self._stats_lock = asyncio.Lock()
-
-    async def get_backend(
-        self,
-        package: str | None,
-        backend_type: str,
-        ttl: int,
-    ) -> AsyncBackend:
-        """Get or create an async backend instance.
-        """
-        key = (package, backend_type, ttl)
-
-        async with self._backends_lock:
-            if key in self.backends:
-                return self.backends[key]
-
-            cfg = get_config(package)
-
-            if backend_type == 'memory':
-                backend: AsyncBackend = AsyncMemoryBackend()
-            elif backend_type == 'file':
-                from .backends.sqlite import AsyncSqliteBackend
-
-                if ttl < 60:
-                    filename = f'cache{ttl}sec.db'
-                elif ttl < 3600:
-                    filename = f'cache{ttl // 60}min.db'
-                else:
-                    filename = f'cache{ttl // 3600}hour.db'
-
-                if package:
-                    filename = f'{package}_{filename}'
-
-                filepath = os.path.join(cfg.file_dir, filename)
-                backend = AsyncSqliteBackend(filepath)
-            elif backend_type == 'redis':
-                from .backends.redis import AsyncRedisBackend
-                backend = AsyncRedisBackend(cfg.redis_url)
-            else:
-                raise ValueError(f'Unknown backend type: {backend_type}')
-
-            self.backends[key] = backend
-            logger.debug(f"Created async {backend_type} backend for package '{package}', {ttl}s TTL")
-            return backend
-
-    async def record_hit(self, fn: Callable[..., Any]) -> None:
-        """Record a cache hit for the async function.
-        """
-        fn_id = id(fn)
-        async with self._stats_lock:
-            hits, misses = self.stats.get(fn_id, (0, 0))
-            self.stats[fn_id] = (hits + 1, misses)
-
-    async def record_miss(self, fn: Callable[..., Any]) -> None:
-        """Record a cache miss for the async function.
-        """
-        fn_id = id(fn)
-        async with self._stats_lock:
-            hits, misses = self.stats.get(fn_id, (0, 0))
-            self.stats[fn_id] = (hits, misses + 1)
-
-    async def get_stats(self, fn: Callable[..., Any]) -> tuple[int, int]:
-        """Get (hits, misses) for a function.
-        """
-        fn_id = id(fn)
-        async with self._stats_lock:
-            return self.stats.get(fn_id, (0, 0))
-
-    async def clear(self, package: str | None = None) -> None:
-        """Clear backend instances, calling close() on each.
+    async def aclear(self, package: str | None = None) -> None:
+        """Clear backend instances (async).
         """
-        async with self.
+        async with self._get_async_lock():
             if package is None:
                 for backend in self.backends.values():
-                    await backend.
+                    await backend.aclose()
                 self.backends.clear()
             else:
                 keys_to_delete = [k for k in self.backends if k[0] == package]
                 for key in keys_to_delete:
-                    await self.backends[key].
+                    await self.backends[key].aclose()
                     del self.backends[key]


 manager = CacheManager()
-async_manager = AsyncCacheManager()


 def get_backend(
@@ -218,13 +172,13 @@ def get_backend(
     return manager.get_backend(package, backend_type, ttl)


-async def
+async def aget_backend(
     backend_type: str | None = None,
     package: str | None = None,
     *,
     ttl: int,
-) ->
-    """Get
+) -> Backend:
+    """Get a backend instance (async).

     Args:
         backend_type: 'memory', 'file', or 'redis'. Uses config default if None.
@@ -238,7 +192,63 @@ async def get_async_backend(
         cfg = get_config(package)
         backend_type = cfg.backend

-    return await
+    return await manager.aget_backend(package, backend_type, ttl)
+
+
+def _validate_entry(
+    value: Any,
+    created_at: float | None,
+    validate: Callable[[CacheEntry], bool] | None,
+) -> bool:
+    """Validate a cached entry using the validate callback.
+    """
+    if validate is None or created_at is None:
+        return True
+
+    entry = CacheEntry(
+        value=value,
+        created_at=created_at,
+        age=time.time() - created_at,
+    )
+    return validate(entry)
+
+
+def _attach_helpers(
+    wrapper: Callable[..., Any],
+    key_generator: Callable[..., str],
+    resolved_package: str | None,
+    resolved_backend: str,
+    ttl: int,
+    is_async: bool,
+) -> None:
+    """Attach .invalidate() and .refresh() methods to wrapper.
+    """
+    if is_async:
+        async def invalidate(**kwargs: Any) -> None:
+            backend = await manager.aget_backend(resolved_package, resolved_backend, ttl)
+            cfg = get_config(resolved_package)
+            cache_key = mangle_key(key_generator(**kwargs), cfg.key_prefix, ttl)
+            await backend.adelete(cache_key)
+
+        async def refresh(**kwargs: Any) -> Any:
+            await invalidate(**kwargs)
+            return await wrapper(**kwargs)
+
+        wrapper.invalidate = invalidate # type: ignore
+        wrapper.refresh = refresh # type: ignore
+    else:
+        def invalidate(**kwargs: Any) -> None:
+            backend = manager.get_backend(resolved_package, resolved_backend, ttl)
+            cfg = get_config(resolved_package)
+            cache_key = mangle_key(key_generator(**kwargs), cfg.key_prefix, ttl)
+            backend.delete(cache_key)
+
+        def refresh(**kwargs: Any) -> Any:
+            invalidate(**kwargs)
+            return wrapper(**kwargs)
+
+        wrapper.invalidate = invalidate # type: ignore
+        wrapper.refresh = refresh # type: ignore


 def cache(
@@ -250,7 +260,10 @@ def cache(
     validate: Callable[[CacheEntry], bool] | None = None,
     package: str | None = None,
 ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
-    """
+    """Universal cache decorator for sync and async functions.
+
+    Automatically detects async functions and uses appropriate code path.
+    Includes dogpile prevention using per-key mutexes.

     Args:
         ttl: Time-to-live in seconds (default: 300)
@@ -272,6 +285,10 @@ def cache(
         def get_user(user_id: int) -> dict:
             return fetch_user(user_id)

+        @cache(ttl=300, tag='users')
+        async def get_user_async(user_id: int) -> dict:
+            return await fetch_user(user_id)
+
         # Normal call
         user = get_user(123)

@@ -280,6 +297,12 @@ def cache(

         # Force refresh
         user = get_user(123, _overwrite_cache=True)
+
+        # Invalidate specific entry
+        get_user.invalidate(user_id=123)
+
+        # Refresh specific entry
+        user = get_user.refresh(user_id=123)
     """
     resolved_package = package if package is not None else _get_caller_package()

@@ -291,6 +314,7 @@ def cache(

     def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
         key_generator = make_key_generator(fn, tag, exclude)
+        is_async = asyncio.iscoroutinefunction(fn)

         meta = CacheMeta(
             ttl=ttl,
@@ -303,174 +327,129 @@ def cache(
             key_generator=key_generator,
         )

-
-
-
-
+        if is_async:
+            @wraps(fn)
+            async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+                skip_cache = kwargs.pop('_skip_cache', False)
+                overwrite_cache = kwargs.pop('_overwrite_cache', False)

-
-
+                if is_disabled() or skip_cache:
+                    return await fn(*args, **kwargs)

-
-
-
-
-
-
-            if not overwrite_cache:
-                value, created_at = backend_instance.get_with_metadata(cache_key)
-
-                if value is not NO_VALUE:
-                    if validate is not None and created_at is not None:
-                        entry = CacheEntry(
-                            value=value,
-                            created_at=created_at,
-                            age=time.time() - created_at,
-                        )
-                        if not validate(entry):
-                            logger.debug(f'Cache validation failed for {fn.__name__}')
-                        else:
-                            manager.record_hit(wrapper)
-                            return value
-                    else:
-                        manager.record_hit(wrapper)
-                        return value
-
-            manager.record_miss(wrapper)
-            result = fn(*args, **kwargs)
-
-            should_cache = cache_if is None or cache_if(result)
-
-            if should_cache:
-                backend_instance.set(cache_key, result, ttl)
-                logger.debug(f'Cached {fn.__name__} with key {cache_key}')
-
-            return result
-
-        wrapper._cache_meta = meta # type: ignore
-        wrapper._cache_key_generator = key_generator # type: ignore
-
-        return wrapper
+                backend_inst = await manager.aget_backend(
+                    resolved_package,
+                    resolved_backend,
+                    ttl,
+                )
+                cfg = get_config(resolved_package)

-
+                base_key = key_generator(*args, **kwargs)
+                cache_key = mangle_key(base_key, cfg.key_prefix, ttl)

+                if not overwrite_cache:
+                    value, created_at = await backend_inst.aget_with_metadata(cache_key)

-
-
-
-    tag: str = '',
-    exclude: set[str] | None = None,
-    cache_if: Callable[[Any], bool] | None = None,
-    validate: Callable[[CacheEntry], bool] | None = None,
-    package: str | None = None,
-) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
-    """Async cache decorator with configurable backend and behavior.
-
-    Args:
-        ttl: Time-to-live in seconds (default: 300)
-        backend: Backend type ('memory', 'file', 'redis'). Uses config default if None.
-        tag: Tag for grouping related cache entries
-        exclude: Parameter names to exclude from cache key
-        cache_if: Function to determine if result should be cached.
-            Called with result value, caches if returns True.
-        validate: Function to validate cached entries before returning.
-            Called with CacheEntry, returns False to recompute.
-        package: Package name for config isolation. Auto-detected if None.
+                    if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                        manager.record_hit(async_wrapper)
+                        return value

-
-
-
+                mutex = backend_inst.get_async_mutex(cache_key)
+                acquired = await mutex.acquire(timeout=cfg.lock_timeout)
+                try:
+                    if not overwrite_cache:
+                        value, created_at = await backend_inst.aget_with_metadata(cache_key)
+                        if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                            manager.record_hit(async_wrapper)
+                            return value

-
-
-        async def get_user(user_id: int) -> dict:
-            return await fetch_user(user_id)
+                    manager.record_miss(async_wrapper)
+                    result = await fn(*args, **kwargs)

-
-
+                    if cache_if is None or cache_if(result):
+                        await backend_inst.aset(cache_key, result, ttl)
+                        logger.debug(f'Cached {fn.__name__} with key {cache_key}')

-
-
+                    return result
+                finally:
+                    if acquired:
+                        await mutex.release()

-
-
-
-
+            async_wrapper._cache_meta = meta # type: ignore
+            async_wrapper._cache_key_generator = key_generator # type: ignore
+            _attach_helpers(async_wrapper, key_generator, resolved_package, resolved_backend, ttl, is_async=True)
+            return async_wrapper

-
-
-
-
-
+        else:
+            @wraps(fn)
+            def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+                skip_cache = kwargs.pop('_skip_cache', False)
+                overwrite_cache = kwargs.pop('_overwrite_cache', False)

-
-
+                if is_disabled() or skip_cache:
+                    return fn(*args, **kwargs)

-
-
-            backend=resolved_backend,
-            tag=tag,
-            exclude=exclude or set(),
-            cache_if=cache_if,
-            validate=validate,
-            package=resolved_package,
-            key_generator=key_generator,
-        )
+                backend_inst = manager.get_backend(resolved_package, resolved_backend, ttl)
+                cfg = get_config(resolved_package)

-
-
-            skip_cache = kwargs.pop('_skip_cache', False)
-            overwrite_cache = kwargs.pop('_overwrite_cache', False)
+                base_key = key_generator(*args, **kwargs)
+                cache_key = mangle_key(base_key, cfg.key_prefix, ttl)

-
-
+                if not overwrite_cache:
+                    value, created_at = backend_inst.get_with_metadata(cache_key)

-
-
-
-                ttl,
-            )
-            cfg = get_config(resolved_package)
+                    if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                        manager.record_hit(sync_wrapper)
+                        return value

-
-
-
-
-
-
-
-                    if validate is not None and created_at is not None:
-                        entry = CacheEntry(
-                            value=value,
-                            created_at=created_at,
-                            age=time.time() - created_at,
-                        )
-                        if not validate(entry):
-                            logger.debug(f'Cache validation failed for {fn.__name__}')
-                        else:
-                            await async_manager.record_hit(wrapper)
+                mutex = backend_inst.get_mutex(cache_key)
+                acquired = mutex.acquire(timeout=cfg.lock_timeout)
+                try:
+                    if not overwrite_cache:
+                        value, created_at = backend_inst.get_with_metadata(cache_key)
+                        if value is not NO_VALUE and _validate_entry(value, created_at, validate):
+                            manager.record_hit(sync_wrapper)
                             return value
-                    else:
-                        await async_manager.record_hit(wrapper)
-                        return value

-
-
+                    manager.record_miss(sync_wrapper)
+                    result = fn(*args, **kwargs)

-
+                    if cache_if is None or cache_if(result):
+                        backend_inst.set(cache_key, result, ttl)
+                        logger.debug(f'Cached {fn.__name__} with key {cache_key}')

-
-
-
+                    return result
+                finally:
+                    if acquired:
+                        mutex.release()

-
+            sync_wrapper._cache_meta = meta # type: ignore
+            sync_wrapper._cache_key_generator = key_generator # type: ignore
+            _attach_helpers(sync_wrapper, key_generator, resolved_package, resolved_backend, ttl, is_async=False)
+            return sync_wrapper

-
-        wrapper._cache_key_generator = key_generator # type: ignore
+    return decorator

-        return wrapper

-
+def async_cache(
+    ttl: int = 300,
+    backend: str | None = None,
+    tag: str = '',
+    exclude: set[str] | None = None,
+    cache_if: Callable[[Any], bool] | None = None,
+    validate: Callable[[CacheEntry], bool] | None = None,
+    package: str | None = None,
+) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
+    """Deprecated: Use @cache instead (auto-detects async).
+    """
+    return cache(
+        ttl=ttl,
+        backend=backend,
+        tag=tag,
+        exclude=exclude,
+        cache_if=cache_if,
+        validate=validate,
+        package=package,
+    )


 def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
@@ -503,24 +482,24 @@ async def get_async_cache_info(fn: Callable[..., Any]) -> CacheInfo:
     """Get cache statistics for an async decorated function.

     Args:
-        fn: A function decorated with @
+        fn: A function decorated with @cache

     Returns:
         CacheInfo with hits, misses, and currsize
     """
-    hits, misses =
+    hits, misses = manager.get_stats(fn)

     meta = getattr(fn, '_cache_meta', None)
     if meta is None:
         return CacheInfo(hits=hits, misses=misses, currsize=0)

-    backend_instance = await
+    backend_instance = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
     cfg = get_config(meta.package)

     fn_name = getattr(fn, '__wrapped__', fn).__name__
     pattern = f'*:{cfg.key_prefix}{fn_name}|*'

-    currsize = await backend_instance.
+    currsize = await backend_instance.acount(pattern)

     return CacheInfo(hits=hits, misses=misses, currsize=currsize)

@@ -534,4 +513,7 @@ def clear_backends(package: str | None = None) -> None:
 async def clear_async_backends(package: str | None = None) -> None:
     """Clear all async backend instances for a package. Primarily for testing.
     """
-    await
+    await manager.aclear(package)
+
+
+get_async_backend = aget_backend
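
The unified decorator shown in this diff wraps both plain functions and coroutines and attaches invalidate()/refresh() helpers to the wrapper. A minimal usage sketch based on the docstring examples above; the `from cachu import cache` import path and the `fetch_user` stub are illustrative assumptions, not taken from the diff:

    from cachu import cache  # assumed top-level export

    def fetch_user(user_id: int) -> dict:
        # Stand-in for a real lookup (database, HTTP call, etc.).
        return {'id': user_id}

    @cache(ttl=300, tag='users')
    def get_user(user_id: int) -> dict:
        return fetch_user(user_id)

    @cache(ttl=300, tag='users')
    async def get_user_async(user_id: int) -> dict:
        # Coroutines are detected via asyncio.iscoroutinefunction and take the async path.
        return fetch_user(user_id)

    user = get_user(123)                          # first call misses, result cached for 300 s
    user = get_user(123, _overwrite_cache=True)   # recompute and overwrite the entry
    get_user.invalidate(user_id=123)              # drop this one entry
    user = get_user.refresh(user_id=123)          # invalidate, then recompute and re-cache

Per the new wrapper bodies, a miss acquires a per-key mutex (provided by the new cachu/mutex.py), re-checks the backend, and only then recomputes, which is the dogpile prevention the decorator docstring mentions.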
|