cachu-0.2.4-py3-none-any.whl → cachu-0.2.5-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
cachu/mutex.py ADDED
@@ -0,0 +1,247 @@
+"""Mutex implementations for cache dogpile prevention.
+"""
+import asyncio
+import threading
+import time
+import uuid
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, ClassVar, Self
+
+if TYPE_CHECKING:
+    import redis
+    import redis.asyncio as aioredis
+
+
+class CacheMutex(ABC):
+    """Abstract base class for synchronous cache mutexes.
+    """
+
+    @abstractmethod
+    def acquire(self, timeout: float | None = None) -> bool:
+        """Acquire the lock. Returns True if acquired, False on timeout.
+        """
+
+    @abstractmethod
+    def release(self) -> None:
+        """Release the lock.
+        """
+
+    def __enter__(self) -> Self:
+        self.acquire()
+        return self
+
+    def __exit__(self, *args: object) -> None:
+        self.release()
+
+
+class AsyncCacheMutex(ABC):
+    """Abstract base class for asynchronous cache mutexes.
+    """
+
+    @abstractmethod
+    async def acquire(self, timeout: float | None = None) -> bool:
+        """Acquire the lock. Returns True if acquired, False on timeout.
+        """
+
+    @abstractmethod
+    async def release(self) -> None:
+        """Release the lock.
+        """
+
+    async def __aenter__(self) -> Self:
+        await self.acquire()
+        return self
+
+    async def __aexit__(self, *args: object) -> None:
+        await self.release()
+
+
+class NullMutex(CacheMutex):
+    """No-op mutex for testing or when locking is not needed.
+    """
+
+    def acquire(self, timeout: float | None = None) -> bool:
+        return True
+
+    def release(self) -> None:
+        pass
+
+
+class NullAsyncMutex(AsyncCacheMutex):
+    """No-op async mutex for testing or when locking is not needed.
+    """
+
+    async def acquire(self, timeout: float | None = None) -> bool:
+        return True
+
+    async def release(self) -> None:
+        pass
+
+
+class ThreadingMutex(CacheMutex):
+    """Per-key threading.Lock for local dogpile prevention.
+    """
+    _locks: ClassVar[dict[str, threading.Lock]] = {}
+    _registry_lock: ClassVar[threading.Lock] = threading.Lock()
+
+    def __init__(self, key: str) -> None:
+        self._key = key
+        self._acquired = False
+        with self._registry_lock:
+            if key not in self._locks:
+                self._locks[key] = threading.Lock()
+            self._lock = self._locks[key]
+
+    def acquire(self, timeout: float | None = None) -> bool:
+        if timeout is None:
+            self._acquired = self._lock.acquire()
+        else:
+            self._acquired = self._lock.acquire(timeout=timeout)
+        return self._acquired
+
+    def release(self) -> None:
+        if self._acquired:
+            self._lock.release()
+            self._acquired = False
+
+    @classmethod
+    def clear_locks(cls) -> None:
+        """Clear all locks. For testing only.
+        """
+        with cls._registry_lock:
+            cls._locks.clear()
+
+
+class AsyncioMutex(AsyncCacheMutex):
+    """Per-key asyncio.Lock for local async dogpile prevention.
+    """
+    _locks: ClassVar[dict[str, asyncio.Lock]] = {}
+
+    def __init__(self, key: str) -> None:
+        self._key = key
+        self._acquired = False
+        if key not in self._locks:
+            self._locks[key] = asyncio.Lock()
+        self._lock = self._locks[key]
+
+    async def acquire(self, timeout: float | None = None) -> bool:
+        if timeout is None:
+            await self._lock.acquire()
+            self._acquired = True
+            return True
+
+        try:
+            await asyncio.wait_for(self._lock.acquire(), timeout=timeout)
+            self._acquired = True
+            return True
+        except asyncio.TimeoutError:
+            return False
+
+    async def release(self) -> None:
+        if self._acquired:
+            self._lock.release()
+            self._acquired = False
+
+    @classmethod
+    def clear_locks(cls) -> None:
+        """Clear all locks. For testing only.
+        """
+        cls._locks.clear()
+
+
+class RedisMutex(CacheMutex):
+    """Distributed lock using Redis SET NX EX.
+    """
+    _RELEASE_SCRIPT = """
+    if redis.call("get", KEYS[1]) == ARGV[1] then
+        return redis.call("del", KEYS[1])
+    end
+    return 0
+    """
+
+    def __init__(
+        self,
+        client: 'redis.Redis',
+        key: str,
+        lock_timeout: float = 10.0,
+    ) -> None:
+        self._client = client
+        self._key = key
+        self._lock_timeout = lock_timeout
+        self._token = str(uuid.uuid4())
+        self._acquired = False
+
+    def acquire(self, timeout: float | None = None) -> bool:
+        timeout = timeout or self._lock_timeout
+        end = time.time() + timeout
+        while time.time() < end:
+            if self._client.set(
+                self._key,
+                self._token,
+                nx=True,
+                ex=int(self._lock_timeout),
+            ):
+                self._acquired = True
+                return True
+            time.sleep(0.05)
+        return False
+
+    def release(self) -> None:
+        if self._acquired:
+            self._client.eval(self._RELEASE_SCRIPT, 1, self._key, self._token)
+            self._acquired = False
+
+
+class AsyncRedisMutex(AsyncCacheMutex):
+    """Async distributed lock using redis.asyncio.
+    """
+    _RELEASE_SCRIPT = """
+    if redis.call("get", KEYS[1]) == ARGV[1] then
+        return redis.call("del", KEYS[1])
+    end
+    return 0
+    """
+
+    def __init__(
+        self,
+        client: 'aioredis.Redis',
+        key: str,
+        lock_timeout: float = 10.0,
+    ) -> None:
+        self._client = client
+        self._key = key
+        self._lock_timeout = lock_timeout
+        self._token = str(uuid.uuid4())
+        self._acquired = False
+
+    async def acquire(self, timeout: float | None = None) -> bool:
+        timeout = timeout or self._lock_timeout
+        end = time.time() + timeout
+        while time.time() < end:
+            if await self._client.set(
+                self._key,
+                self._token,
+                nx=True,
+                ex=int(self._lock_timeout),
+            ):
+                self._acquired = True
+                return True
+            await asyncio.sleep(0.05)
+        return False
+
+    async def release(self) -> None:
+        if self._acquired:
+            await self._client.eval(self._RELEASE_SCRIPT, 1, self._key, self._token)
+            self._acquired = False
+
+
+__all__ = [
+    'CacheMutex',
+    'AsyncCacheMutex',
+    'NullMutex',
+    'NullAsyncMutex',
+    'ThreadingMutex',
+    'AsyncioMutex',
+    'RedisMutex',
+    'AsyncRedisMutex',
+]
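
Note: the new module above only defines the locking primitives; how cachu's decorator wires them in is not shown in this diff. As a rough usage sketch (not taken from the package), the per-key and Redis-backed mutexes can be used directly around a cache fill. `expensive_compute` and the key strings below are placeholders:

import redis  # required only for the RedisMutex example

from cachu.mutex import RedisMutex, ThreadingMutex


def expensive_compute(key: str) -> str:
    return f'value-for-{key}'  # stand-in for a slow recomputation


def load_local(key: str) -> str:
    # One thread per process recomputes a given key; others block on the same lock.
    with ThreadingMutex(f'report:{key}'):
        return expensive_compute(key)


def load_distributed(client: redis.Redis, key: str) -> str | None:
    # Cross-process guard via SET NX EX; gives up after 2 seconds instead of blocking.
    lock = RedisMutex(client, f'lock:report:{key}', lock_timeout=10.0)
    if not lock.acquire(timeout=2.0):
        return None
    try:
        return expensive_compute(key)
    finally:
        lock.release()

The Redis variants release through a compare-and-delete Lua script keyed on a per-instance token, so a lock that expired and was re-acquired by another process is not deleted by the original holder.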
cachu/operations.py CHANGED
@@ -6,8 +6,7 @@ from typing import Any
 
 from .backends import NO_VALUE
 from .config import _get_caller_package, get_config
-from .decorator import async_manager, get_async_cache_info, get_cache_info
-from .decorator import manager
+from .decorator import get_async_cache_info, get_cache_info, manager
 from .keys import _tag_to_pattern, mangle_key
 from .types import CacheInfo, CacheMeta
 
@@ -140,7 +139,7 @@ def cache_clear(
             total_cleared += cleared
             logger.debug(f'Cleared {cleared} entries from {backend} backend (ttl={ttl})')
     else:
-        with manager._backends_lock:
+        with manager._sync_lock:
             for (pkg, btype, bttl), backend_instance in list(manager.backends.items()):
                 if pkg != package:
                     continue
@@ -184,7 +183,7 @@ async def async_cache_get(
     """Get a cached value without calling the async function.
 
     Args:
-        fn: A function decorated with @async_cache
+        fn: A function decorated with @cache
         default: Value to return if not found (raises KeyError if not provided)
         **kwargs: Function arguments to build the cache key
 
@@ -193,17 +192,17 @@ async def async_cache_get(
 
     Raises
         KeyError: If not found and no default provided
-        ValueError: If function is not decorated with @async_cache
+        ValueError: If function is not decorated with @cache
     """
-    meta = _get_meta(fn, '@async_cache')
+    meta = _get_meta(fn, '@cache')
     cfg = get_config(meta.package)
 
     key_generator = fn._cache_key_generator
     base_key = key_generator(**kwargs)
     cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)
 
-    backend = await async_manager.get_backend(meta.package, meta.backend, meta.ttl)
-    value = await backend.get(cache_key)
+    backend = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+    value = await backend.aget(cache_key)
 
     if value is NO_VALUE:
         if default is _MISSING:
@@ -217,22 +216,22 @@ async def async_cache_set(fn: Callable[..., Any], value: Any, **kwargs: Any) ->
     """Set a cached value directly without calling the async function.
 
     Args:
-        fn: A function decorated with @async_cache
+        fn: A function decorated with @cache
         value: The value to cache
         **kwargs: Function arguments to build the cache key
 
     Raises
-        ValueError: If function is not decorated with @async_cache
+        ValueError: If function is not decorated with @cache
     """
-    meta = _get_meta(fn, '@async_cache')
+    meta = _get_meta(fn, '@cache')
     cfg = get_config(meta.package)
 
     key_generator = fn._cache_key_generator
     base_key = key_generator(**kwargs)
     cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)
 
-    backend = await async_manager.get_backend(meta.package, meta.backend, meta.ttl)
-    await backend.set(cache_key, value, meta.ttl)
+    backend = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+    await backend.aset(cache_key, value, meta.ttl)
 
     logger.debug(f'Set cache for {fn.__name__} with key {cache_key}')
 
@@ -241,21 +240,21 @@ async def async_cache_delete(fn: Callable[..., Any], **kwargs: Any) -> None:
     """Delete a specific cached entry.
 
     Args:
-        fn: A function decorated with @async_cache
+        fn: A function decorated with @cache
         **kwargs: Function arguments to build the cache key
 
     Raises
-        ValueError: If function is not decorated with @async_cache
+        ValueError: If function is not decorated with @cache
     """
-    meta = _get_meta(fn, '@async_cache')
+    meta = _get_meta(fn, '@cache')
     cfg = get_config(meta.package)
 
     key_generator = fn._cache_key_generator
     base_key = key_generator(**kwargs)
     cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)
 
-    backend = await async_manager.get_backend(meta.package, meta.backend, meta.ttl)
-    await backend.delete(cache_key)
+    backend = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+    await backend.adelete(cache_key)
 
     logger.debug(f'Deleted cache for {fn.__name__} with key {cache_key}')
 
@@ -289,14 +288,14 @@ async def async_cache_clear(
     total_cleared = 0
 
     if backend is not None and ttl is not None:
-        backend_instance = await async_manager.get_backend(package, backend, ttl)
-        cleared = await backend_instance.clear(pattern)
+        backend_instance = await manager.aget_backend(package, backend, ttl)
+        cleared = await backend_instance.aclear(pattern)
         if cleared > 0:
             total_cleared += cleared
             logger.debug(f'Cleared {cleared} entries from {backend} backend (ttl={ttl})')
     else:
-        async with async_manager._backends_lock:
-            for (pkg, btype, bttl), backend_instance in list(async_manager.backends.items()):
+        async with manager._get_async_lock():
+            for (pkg, btype, bttl), backend_instance in list(manager.backends.items()):
                 if pkg != package:
                     continue
                 if btype not in backends_to_clear:
@@ -304,13 +303,13 @@ async def async_cache_clear(
                 if ttl is not None and bttl != ttl:
                     continue
 
-                cleared = await backend_instance.clear(pattern)
+                cleared = await backend_instance.aclear(pattern)
                 if cleared > 0:
                     total_cleared += cleared
                     logger.debug(f'Cleared {cleared} entries from {btype} backend (ttl={bttl})')
 
-    async with async_manager._stats_lock:
-        async_manager.stats.clear()
+    with manager._stats_lock:
+        manager.stats.clear()
 
     return total_cleared
 
@@ -319,13 +318,13 @@ async def async_cache_info(fn: Callable[..., Any]) -> CacheInfo:
     """Get cache statistics for an async decorated function.
 
     Args:
-        fn: A function decorated with @async_cache
+        fn: A function decorated with @cache
 
     Returns
         CacheInfo with hits, misses, and currsize
 
     Raises
-        ValueError: If function is not decorated with @async_cache
+        ValueError: If function is not decorated with @cache
     """
-    _get_meta(fn, '@async_cache')
+    _get_meta(fn, '@cache')
     return await get_async_cache_info(fn)
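
Note: the hunks above rename the async backend accessors (manager.aget_backend, backend.aget/aset/adelete/aclear) and point the docstrings at a single @cache decorator. A minimal sketch of the resulting call pattern, assuming @cache is importable from the package root and accepts async functions (cachu/decorator.py itself is not shown in this diff):

import asyncio

from cachu import cache  # assumed export of the unified decorator
from cachu.operations import async_cache_get, async_cache_set


@cache(ttl=300)
async def get_user(user_id: int) -> dict:
    return {'id': user_id}


async def main() -> None:
    await get_user(123)                                 # populate the cache
    hit = await async_cache_get(get_user, user_id=123)  # read without calling get_user
    await async_cache_set(get_user, {'id': 123}, user_id=123)  # overwrite directly
    print(hit)


asyncio.run(main())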
cachu-0.2.4.dist-info/METADATA → cachu-0.2.5.dist-info/METADATA CHANGED
@@ -1,13 +1,12 @@
 Metadata-Version: 2.4
 Name: cachu
-Version: 0.2.4
+Version: 0.2.5
 Summary: Flexible caching library with sync and async support for memory, file (SQLite), and Redis backends
 Author: bissli
 License-Expression: 0BSD
 Project-URL: Repository, https://github.com/bissli/cachu.git
 Requires-Python: >=3.10
 Description-Content-Type: text/markdown
-Requires-Dist: dogpile.cache
 Requires-Dist: func-timeout
 Provides-Extra: async
 Requires-Dist: aiosqlite; extra == "async"
cachu-0.2.5.dist-info/RECORD ADDED
@@ -0,0 +1,15 @@
+cachu/__init__.py,sha256=b3uooLcOZo5mfJqrTKi1ldBe8gu9k2L3DK4L2B0ifxA,1156
+cachu/config.py,sha256=u5Oqsy4bx3OMk3ImGDCzvO-U-HrRAxaC4dKHE4PUmQw,5909
+cachu/decorator.py,sha256=YIO7n9gEpZNx4UHzWORI2H9mpENxGFnX_WfT9EOhQ8c,17724
+cachu/keys.py,sha256=3em9_umQYFwwF2EwmIK8yWJq8aO2CI1DMRjq6Je3xC8,3747
+cachu/mutex.py,sha256=SJpbKroS43UmuR_UlT9wPXhOnwDYdABo6iLkhrlVqMc,6464
+cachu/operations.py,sha256=Z1zI5nhlEpM1diJQf7XHQ9cpH1HbIkdSizJV6NxLZPE,10216
+cachu/types.py,sha256=FghBN5GhxnrpuT4WUL9iNnAfdoH__cw9_Ag4kHbIXq4,723
+cachu/backends/__init__.py,sha256=hUGASS8kKMebf7O36RUrvpcSfi4KbcJPUwgqgccTbcw,3100
+cachu/backends/memory.py,sha256=XWBoh4x3m6WiTAy4hrmm4NA6NkiTwlRxrAHxvVTG9mo,6473
+cachu/backends/redis.py,sha256=kMVmb2EQJbc6zoSYuLUOFgxnSX_wgA1uiu_8mF0htV0,8307
+cachu/backends/sqlite.py,sha256=RGI8Iycxi7kMAtQmjhIorsf5bLutcyr6Xw_i8iYK3JQ,18181
+cachu-0.2.5.dist-info/METADATA,sha256=78331qx3dAztbUvFmB3WBZhKbi_3b7Qa0F8Kk85s4QE,11748
+cachu-0.2.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+cachu-0.2.5.dist-info/top_level.txt,sha256=g80nNoMvLMzhSwQWV-JotCBqtsLAHeFMBo_g8hCK8hQ,6
+cachu-0.2.5.dist-info/RECORD,,
cachu/async_decorator.py DELETED
@@ -1,262 +0,0 @@
-"""Async cache decorator implementation.
-"""
-import asyncio
-import logging
-import os
-import time
-from collections.abc import Awaitable, Callable
-from functools import wraps
-from typing import Any
-
-from .backends import NO_VALUE
-from .backends.async_base import AsyncBackend
-from .backends.async_memory import AsyncMemoryBackend
-from .config import _get_caller_package, get_config, is_disabled
-from .keys import make_key_generator, mangle_key
-from .types import CacheEntry, CacheInfo, CacheMeta
-
-logger = logging.getLogger(__name__)
-
-_async_backends: dict[tuple[str | None, str, int], AsyncBackend] = {}
-_async_backends_lock = asyncio.Lock()
-
-_async_stats: dict[int, tuple[int, int]] = {}
-_async_stats_lock = asyncio.Lock()
-
-
-async def _get_async_backend(package: str | None, backend_type: str, ttl: int) -> AsyncBackend:
-    """Get or create an async backend instance.
-    """
-    key = (package, backend_type, ttl)
-
-    async with _async_backends_lock:
-        if key in _async_backends:
-            return _async_backends[key]
-
-        cfg = get_config(package)
-
-        if backend_type == 'memory':
-            backend: AsyncBackend = AsyncMemoryBackend()
-        elif backend_type == 'file':
-            from .backends.async_sqlite import AsyncSqliteBackend
-
-            if ttl < 60:
-                filename = f'cache{ttl}sec.db'
-            elif ttl < 3600:
-                filename = f'cache{ttl // 60}min.db'
-            else:
-                filename = f'cache{ttl // 3600}hour.db'
-
-            if package:
-                filename = f'{package}_{filename}'
-
-            filepath = os.path.join(cfg.file_dir, filename)
-            backend = AsyncSqliteBackend(filepath)
-        elif backend_type == 'redis':
-            from .backends.async_redis import AsyncRedisBackend
-            backend = AsyncRedisBackend(cfg.redis_url, cfg.redis_distributed)
-        else:
-            raise ValueError(f'Unknown backend type: {backend_type}')
-
-        _async_backends[key] = backend
-        logger.debug(f"Created async {backend_type} backend for package '{package}', {ttl}s TTL")
-        return backend
-
-
-async def get_async_backend(
-    backend_type: str | None = None,
-    package: str | None = None,
-    *,
-    ttl: int,
-) -> AsyncBackend:
-    """Get an async backend instance.
-
-    Args:
-        backend_type: 'memory', 'file', or 'redis'. Uses config default if None.
-        package: Package name. Auto-detected if None.
-        ttl: TTL in seconds (used for backend separation).
-    """
-    if package is None:
-        package = _get_caller_package()
-
-    if backend_type is None:
-        cfg = get_config(package)
-        backend_type = cfg.backend
-
-    return await _get_async_backend(package, backend_type, ttl)
-
-
-def async_cache(
-    ttl: int = 300,
-    backend: str | None = None,
-    tag: str = '',
-    exclude: set[str] | None = None,
-    cache_if: Callable[[Any], bool] | None = None,
-    validate: Callable[[CacheEntry], bool] | None = None,
-    package: str | None = None,
-) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
-    """Async cache decorator with configurable backend and behavior.
-
-    Args:
-        ttl: Time-to-live in seconds (default: 300)
-        backend: Backend type ('memory', 'file', 'redis'). Uses config default if None.
-        tag: Tag for grouping related cache entries
-        exclude: Parameter names to exclude from cache key
-        cache_if: Function to determine if result should be cached.
-            Called with result value, caches if returns True.
-        validate: Function to validate cached entries before returning.
-            Called with CacheEntry, returns False to recompute.
-        package: Package name for config isolation. Auto-detected if None.
-
-    Per-call control via reserved kwargs (not passed to function):
-        _skip_cache: If True, bypass cache completely for this call
-        _overwrite_cache: If True, execute function and overwrite cached value
-
-    Example:
-        @async_cache(ttl=300, tag='users')
-        async def get_user(user_id: int) -> dict:
-            return await fetch_user(user_id)
-
-        # Normal call
-        user = await get_user(123)
-
-        # Skip cache
-        user = await get_user(123, _skip_cache=True)
-
-        # Force refresh
-        user = await get_user(123, _overwrite_cache=True)
-    """
-    resolved_package = package if package is not None else _get_caller_package()
-
-    if backend is None:
-        cfg = get_config(resolved_package)
-        resolved_backend = cfg.backend
-    else:
-        resolved_backend = backend
-
-    def decorator(fn: Callable[..., Awaitable[Any]]) -> Callable[..., Awaitable[Any]]:
-        key_generator = make_key_generator(fn, tag, exclude)
-
-        meta = CacheMeta(
-            ttl=ttl,
-            backend=resolved_backend,
-            tag=tag,
-            exclude=exclude or set(),
-            cache_if=cache_if,
-            validate=validate,
-            package=resolved_package,
-            key_generator=key_generator,
-        )
-
-        @wraps(fn)
-        async def wrapper(*args: Any, **kwargs: Any) -> Any:
-            skip_cache = kwargs.pop('_skip_cache', False)
-            overwrite_cache = kwargs.pop('_overwrite_cache', False)
-
-            if is_disabled() or skip_cache:
-                return await fn(*args, **kwargs)
-
-            backend_instance = await _get_async_backend(resolved_package, resolved_backend, ttl)
-            cfg = get_config(resolved_package)
-
-            base_key = key_generator(*args, **kwargs)
-            cache_key = mangle_key(base_key, cfg.key_prefix, ttl)
-
-            if not overwrite_cache:
-                value, created_at = await backend_instance.get_with_metadata(cache_key)
-
-                if value is not NO_VALUE:
-                    if validate is not None and created_at is not None:
-                        entry = CacheEntry(
-                            value=value,
-                            created_at=created_at,
-                            age=time.time() - created_at,
-                        )
-                        if not validate(entry):
-                            logger.debug(f'Cache validation failed for {fn.__name__}')
-                        else:
-                            await _record_async_hit(wrapper)
-                            return value
-                    else:
-                        await _record_async_hit(wrapper)
-                        return value
-
-            await _record_async_miss(wrapper)
-            result = await fn(*args, **kwargs)
-
-            should_cache = cache_if is None or cache_if(result)
-
-            if should_cache:
-                await backend_instance.set(cache_key, result, ttl)
-                logger.debug(f'Cached {fn.__name__} with key {cache_key}')
-
-            return result
-
-        wrapper._cache_meta = meta # type: ignore
-        wrapper._cache_key_generator = key_generator # type: ignore
-
-        return wrapper
-
-    return decorator
-
-
-async def _record_async_hit(fn: Callable[..., Any]) -> None:
-    """Record a cache hit for the async function.
-    """
-    fn_id = id(fn)
-    async with _async_stats_lock:
-        hits, misses = _async_stats.get(fn_id, (0, 0))
-        _async_stats[fn_id] = (hits + 1, misses)
-
-
-async def _record_async_miss(fn: Callable[..., Any]) -> None:
-    """Record a cache miss for the async function.
-    """
-    fn_id = id(fn)
-    async with _async_stats_lock:
-        hits, misses = _async_stats.get(fn_id, (0, 0))
-        _async_stats[fn_id] = (hits, misses + 1)
-
-
-async def get_async_cache_info(fn: Callable[..., Any]) -> CacheInfo:
-    """Get cache statistics for an async decorated function.
-
-    Args:
-        fn: A function decorated with @async_cache
-
-    Returns
-        CacheInfo with hits, misses, and currsize
-    """
-    fn_id = id(fn)
-
-    async with _async_stats_lock:
-        hits, misses = _async_stats.get(fn_id, (0, 0))
-
-    meta = getattr(fn, '_cache_meta', None)
-    if meta is None:
-        return CacheInfo(hits=hits, misses=misses, currsize=0)
-
-    backend_instance = await _get_async_backend(meta.package, meta.backend, meta.ttl)
-    cfg = get_config(meta.package)
-
-    fn_name = getattr(fn, '__wrapped__', fn).__name__
-    pattern = f'*:{cfg.key_prefix}{fn_name}|*'
-
-    currsize = await backend_instance.count(pattern)
-
-    return CacheInfo(hits=hits, misses=misses, currsize=currsize)
-
-
-async def clear_async_backends(package: str | None = None) -> None:
-    """Clear all async backend instances for a package. Primarily for testing.
-    """
-    async with _async_backends_lock:
-        if package is None:
-            for backend in _async_backends.values():
-                await backend.close()
-            _async_backends.clear()
-        else:
-            keys_to_delete = [k for k in _async_backends if k[0] == package]
-            for key in keys_to_delete:
-                await _async_backends[key].close()
-                del _async_backends[key]
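
Note: with cachu/async_decorator.py removed, the @async_cache entry point above no longer exists; the docstring updates in operations.py suggest async functions are now decorated with the unified @cache instead. A hedged migration sketch (the new decorator's import path and parameter set are assumed by analogy, not shown in this diff):

# Before (0.2.4, module removed in 0.2.5):
# from cachu.async_decorator import async_cache
#
# @async_cache(ttl=300, tag='users')
# async def get_user(user_id: int) -> dict: ...

# After (0.2.5), assuming the unified decorator is exported from the package root
# and accepts the same keyword arguments:
from cachu import cache


@cache(ttl=300, tag='users')
async def get_user(user_id: int) -> dict:
    ...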