cachu 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
cachu/mutex.py ADDED
@@ -0,0 +1,247 @@
+ """Mutex implementations for cache dogpile prevention.
+ """
+ import asyncio
+ import threading
+ import time
+ import uuid
+ from abc import ABC, abstractmethod
+ from typing import TYPE_CHECKING, ClassVar, Self
+
+ if TYPE_CHECKING:
+     import redis
+     import redis.asyncio as aioredis
+
+
+ class CacheMutex(ABC):
+     """Abstract base class for synchronous cache mutexes.
+     """
+
+     @abstractmethod
+     def acquire(self, timeout: float | None = None) -> bool:
+         """Acquire the lock. Returns True if acquired, False on timeout.
+         """
+
+     @abstractmethod
+     def release(self) -> None:
+         """Release the lock.
+         """
+
+     def __enter__(self) -> Self:
+         self.acquire()
+         return self
+
+     def __exit__(self, *args: object) -> None:
+         self.release()
+
+
+ class AsyncCacheMutex(ABC):
+     """Abstract base class for asynchronous cache mutexes.
+     """
+
+     @abstractmethod
+     async def acquire(self, timeout: float | None = None) -> bool:
+         """Acquire the lock. Returns True if acquired, False on timeout.
+         """
+
+     @abstractmethod
+     async def release(self) -> None:
+         """Release the lock.
+         """
+
+     async def __aenter__(self) -> Self:
+         await self.acquire()
+         return self
+
+     async def __aexit__(self, *args: object) -> None:
+         await self.release()
+
+
+ class NullMutex(CacheMutex):
+     """No-op mutex for testing or when locking is not needed.
+     """
+
+     def acquire(self, timeout: float | None = None) -> bool:
+         return True
+
+     def release(self) -> None:
+         pass
+
+
+ class NullAsyncMutex(AsyncCacheMutex):
+     """No-op async mutex for testing or when locking is not needed.
+     """
+
+     async def acquire(self, timeout: float | None = None) -> bool:
+         return True
+
+     async def release(self) -> None:
+         pass
+
+
+ class ThreadingMutex(CacheMutex):
+     """Per-key threading.Lock for local dogpile prevention.
+     """
+     _locks: ClassVar[dict[str, threading.Lock]] = {}
+     _registry_lock: ClassVar[threading.Lock] = threading.Lock()
+
+     def __init__(self, key: str) -> None:
+         self._key = key
+         self._acquired = False
+         with self._registry_lock:
+             if key not in self._locks:
+                 self._locks[key] = threading.Lock()
+             self._lock = self._locks[key]
+
+     def acquire(self, timeout: float | None = None) -> bool:
+         if timeout is None:
+             self._acquired = self._lock.acquire()
+         else:
+             self._acquired = self._lock.acquire(timeout=timeout)
+         return self._acquired
+
+     def release(self) -> None:
+         if self._acquired:
+             self._lock.release()
+             self._acquired = False
+
+     @classmethod
+     def clear_locks(cls) -> None:
+         """Clear all locks. For testing only.
+         """
+         with cls._registry_lock:
+             cls._locks.clear()
+
+
+ class AsyncioMutex(AsyncCacheMutex):
+     """Per-key asyncio.Lock for local async dogpile prevention.
+     """
+     _locks: ClassVar[dict[str, asyncio.Lock]] = {}
+
+     def __init__(self, key: str) -> None:
+         self._key = key
+         self._acquired = False
+         if key not in self._locks:
+             self._locks[key] = asyncio.Lock()
+         self._lock = self._locks[key]
+
+     async def acquire(self, timeout: float | None = None) -> bool:
+         if timeout is None:
+             await self._lock.acquire()
+             self._acquired = True
+             return True
+
+         try:
+             await asyncio.wait_for(self._lock.acquire(), timeout=timeout)
+             self._acquired = True
+             return True
+         except asyncio.TimeoutError:
+             return False
+
+     async def release(self) -> None:
+         if self._acquired:
+             self._lock.release()
+             self._acquired = False
+
+     @classmethod
+     def clear_locks(cls) -> None:
+         """Clear all locks. For testing only.
+         """
+         cls._locks.clear()
+
+
+ class RedisMutex(CacheMutex):
+     """Distributed lock using Redis SET NX EX.
+     """
+     _RELEASE_SCRIPT = """
+     if redis.call("get", KEYS[1]) == ARGV[1] then
+         return redis.call("del", KEYS[1])
+     end
+     return 0
+     """
+
+     def __init__(
+         self,
+         client: 'redis.Redis',
+         key: str,
+         lock_timeout: float = 10.0,
+     ) -> None:
+         self._client = client
+         self._key = key
+         self._lock_timeout = lock_timeout
+         self._token = str(uuid.uuid4())
+         self._acquired = False
+
+     def acquire(self, timeout: float | None = None) -> bool:
+         timeout = timeout or self._lock_timeout
+         end = time.time() + timeout
+         while time.time() < end:
+             if self._client.set(
+                 self._key,
+                 self._token,
+                 nx=True,
+                 ex=int(self._lock_timeout),
+             ):
+                 self._acquired = True
+                 return True
+             time.sleep(0.05)
+         return False
+
+     def release(self) -> None:
+         if self._acquired:
+             self._client.eval(self._RELEASE_SCRIPT, 1, self._key, self._token)
+             self._acquired = False
+
+
+ class AsyncRedisMutex(AsyncCacheMutex):
+     """Async distributed lock using redis.asyncio.
+     """
+     _RELEASE_SCRIPT = """
+     if redis.call("get", KEYS[1]) == ARGV[1] then
+         return redis.call("del", KEYS[1])
+     end
+     return 0
+     """
+
+     def __init__(
+         self,
+         client: 'aioredis.Redis',
+         key: str,
+         lock_timeout: float = 10.0,
+     ) -> None:
+         self._client = client
+         self._key = key
+         self._lock_timeout = lock_timeout
+         self._token = str(uuid.uuid4())
+         self._acquired = False
+
+     async def acquire(self, timeout: float | None = None) -> bool:
+         timeout = timeout or self._lock_timeout
+         end = time.time() + timeout
+         while time.time() < end:
+             if await self._client.set(
+                 self._key,
+                 self._token,
+                 nx=True,
+                 ex=int(self._lock_timeout),
+             ):
+                 self._acquired = True
+                 return True
+             await asyncio.sleep(0.05)
+         return False
+
+     async def release(self) -> None:
+         if self._acquired:
+             await self._client.eval(self._RELEASE_SCRIPT, 1, self._key, self._token)
+             self._acquired = False
+
+
+ __all__ = [
+     'CacheMutex',
+     'AsyncCacheMutex',
+     'NullMutex',
+     'NullAsyncMutex',
+     'ThreadingMutex',
+     'AsyncioMutex',
+     'RedisMutex',
+     'AsyncRedisMutex',
+ ]
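For orientation, here is a minimal usage sketch of the new mutex module. It is not part of the package diff: the class names and signatures come from cachu/mutex.py above, while the cache key, the Redis connection, and the `expensive_load()` helper are illustrative assumptions.

```python
# Sketch only: per-key locking around a slow recomputation, using the classes
# added above in cachu/mutex.py. expensive_load() and the key are hypothetical.
import redis

from cachu.mutex import RedisMutex, ThreadingMutex


def expensive_load(key: str) -> str:
    # Stand-in for the slow work the mutex is meant to protect.
    return f'value-for-{key}'


def load_locally(key: str) -> str:
    # In-process dogpile prevention: one thread computes, the rest wait.
    with ThreadingMutex(key):
        return expensive_load(key)


def load_distributed(key: str) -> str:
    # Cross-process prevention via Redis SET NX EX (assumes a local Redis).
    mutex = RedisMutex(redis.Redis(), key, lock_timeout=10.0)
    if mutex.acquire(timeout=2.0):
        try:
            return expensive_load(key)
        finally:
            mutex.release()
    # Lock not acquired within 2 seconds: fall back to computing without it.
    return expensive_load(key)
```

Both sync mutexes also work as context managers via the base-class `__enter__`/`__exit__`, and the async variants provide `__aenter__`/`__aexit__`.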
cachu/operations.py CHANGED
@@ -1,4 +1,4 @@
- """Cache CRUD operations.
+ """Cache CRUD operations for sync and async APIs.
  """
  import logging
  from collections.abc import Callable
@@ -6,8 +6,8 @@ from typing import Any

  from .backends import NO_VALUE
  from .config import _get_caller_package, get_config
- from .decorator import _get_backend, get_cache_info
- from .keys import mangle_key
+ from .decorator import get_async_cache_info, get_cache_info, manager
+ from .keys import _tag_to_pattern, mangle_key
  from .types import CacheInfo, CacheMeta

  logger = logging.getLogger(__name__)
@@ -15,12 +15,12 @@ logger = logging.getLogger(__name__)
  _MISSING = object()


- def _get_meta(fn: Callable[..., Any]) -> CacheMeta:
+ def _get_meta(fn: Callable[..., Any], decorator_name: str = '@cache') -> CacheMeta:
      """Get CacheMeta from a decorated function.
      """
      meta = getattr(fn, '_cache_meta', None)
      if meta is None:
-         raise ValueError(f'{fn.__name__} is not decorated with @cache')
+         raise ValueError(f'{fn.__name__} is not decorated with {decorator_name}')
      return meta


@@ -46,7 +46,7 @@ def cache_get(fn: Callable[..., Any], default: Any = _MISSING, **kwargs: Any) ->
      base_key = key_generator(**kwargs)
      cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)

-     backend = _get_backend(meta.package, meta.backend, meta.ttl)
+     backend = manager.get_backend(meta.package, meta.backend, meta.ttl)
      value = backend.get(cache_key)

      if value is NO_VALUE:
@@ -75,7 +75,7 @@ def cache_set(fn: Callable[..., Any], value: Any, **kwargs: Any) -> None:
      base_key = key_generator(**kwargs)
      cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)

-     backend = _get_backend(meta.package, meta.backend, meta.ttl)
+     backend = manager.get_backend(meta.package, meta.backend, meta.ttl)
      backend.set(cache_key, value, meta.ttl)

      logger.debug(f'Set cache for {fn.__name__} with key {cache_key}')
@@ -98,7 +98,7 @@ def cache_delete(fn: Callable[..., Any], **kwargs: Any) -> None:
      base_key = key_generator(**kwargs)
      cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)

-     backend = _get_backend(meta.package, meta.backend, meta.ttl)
+     backend = manager.get_backend(meta.package, meta.backend, meta.ttl)
      backend.delete(cache_key)

      logger.debug(f'Deleted cache for {fn.__name__} with key {cache_key}')
@@ -129,28 +129,18 @@ def cache_clear(
      else:
          backends_to_clear = ['memory', 'file', 'redis']

-     if tag:
-         from .keys import _normalize_tag
-         pattern = f'*|{_normalize_tag(tag)}|*'
-     else:
-         pattern = None
-
+     pattern = _tag_to_pattern(tag)
      total_cleared = 0

-     from .decorator import _backends, _backends_lock
-
-     # When both backend and ttl are specified, directly get/create and clear that backend.
-     # This is essential for distributed caches (Redis) where cache_clear may be called
-     # from a different process than the one that populated the cache.
      if backend is not None and ttl is not None:
-         backend_instance = _get_backend(package, backend, ttl)
+         backend_instance = manager.get_backend(package, backend, ttl)
          cleared = backend_instance.clear(pattern)
          if cleared > 0:
              total_cleared += cleared
              logger.debug(f'Cleared {cleared} entries from {backend} backend (ttl={ttl})')
      else:
-         with _backends_lock:
-             for (pkg, btype, bttl), backend_instance in list(_backends.items()):
+         with manager._sync_lock:
+             for (pkg, btype, bttl), backend_instance in list(manager.backends.items()):
                  if pkg != package:
                      continue
                  if btype not in backends_to_clear:
@@ -163,6 +153,9 @@ def cache_clear(
                      total_cleared += cleared
                      logger.debug(f'Cleared {cleared} entries from {btype} backend (ttl={bttl})')

+     with manager._stats_lock:
+         manager.stats.clear()
+
      return total_cleared


@@ -178,5 +171,160 @@ def cache_info(fn: Callable[..., Any]) -> CacheInfo:
      Raises
          ValueError: If function is not decorated with @cache
      """
-     _get_meta(fn)  # Validate it's decorated
+     _get_meta(fn)
      return get_cache_info(fn)
+
+
+ async def async_cache_get(
+     fn: Callable[..., Any],
+     default: Any = _MISSING,
+     **kwargs: Any,
+ ) -> Any:
+     """Get a cached value without calling the async function.
+
+     Args:
+         fn: A function decorated with @cache
+         default: Value to return if not found (raises KeyError if not provided)
+         **kwargs: Function arguments to build the cache key
+
+     Returns
+         The cached value or default
+
+     Raises
+         KeyError: If not found and no default provided
+         ValueError: If function is not decorated with @cache
+     """
+     meta = _get_meta(fn, '@cache')
+     cfg = get_config(meta.package)
+
+     key_generator = fn._cache_key_generator
+     base_key = key_generator(**kwargs)
+     cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)
+
+     backend = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+     value = await backend.aget(cache_key)
+
+     if value is NO_VALUE:
+         if default is _MISSING:
+             raise KeyError(f'No cached value for {fn.__name__} with {kwargs}')
+         return default
+
+     return value
+
+
+ async def async_cache_set(fn: Callable[..., Any], value: Any, **kwargs: Any) -> None:
+     """Set a cached value directly without calling the async function.
+
+     Args:
+         fn: A function decorated with @cache
+         value: The value to cache
+         **kwargs: Function arguments to build the cache key
+
+     Raises
+         ValueError: If function is not decorated with @cache
+     """
+     meta = _get_meta(fn, '@cache')
+     cfg = get_config(meta.package)
+
+     key_generator = fn._cache_key_generator
+     base_key = key_generator(**kwargs)
+     cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)
+
+     backend = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+     await backend.aset(cache_key, value, meta.ttl)
+
+     logger.debug(f'Set cache for {fn.__name__} with key {cache_key}')
+
+
+ async def async_cache_delete(fn: Callable[..., Any], **kwargs: Any) -> None:
+     """Delete a specific cached entry.
+
+     Args:
+         fn: A function decorated with @cache
+         **kwargs: Function arguments to build the cache key
+
+     Raises
+         ValueError: If function is not decorated with @cache
+     """
+     meta = _get_meta(fn, '@cache')
+     cfg = get_config(meta.package)
+
+     key_generator = fn._cache_key_generator
+     base_key = key_generator(**kwargs)
+     cache_key = mangle_key(base_key, cfg.key_prefix, meta.ttl)
+
+     backend = await manager.aget_backend(meta.package, meta.backend, meta.ttl)
+     await backend.adelete(cache_key)
+
+     logger.debug(f'Deleted cache for {fn.__name__} with key {cache_key}')
+
+
+ async def async_cache_clear(
+     tag: str | None = None,
+     backend: str | None = None,
+     ttl: int | None = None,
+     package: str | None = None,
+ ) -> int:
+     """Clear async cache entries matching criteria.
+
+     Args:
+         tag: Clear only entries with this tag
+         backend: Backend type to clear ('memory', 'file', 'redis'). Clears all if None.
+         ttl: Specific TTL region to clear. Clears all TTLs if None.
+         package: Package to clear for. Auto-detected if None.
+
+     Returns
+         Number of entries cleared (may be approximate)
+     """
+     if package is None:
+         package = _get_caller_package()
+
+     if backend is not None:
+         backends_to_clear = [backend]
+     else:
+         backends_to_clear = ['memory', 'file', 'redis']
+
+     pattern = _tag_to_pattern(tag)
+     total_cleared = 0
+
+     if backend is not None and ttl is not None:
+         backend_instance = await manager.aget_backend(package, backend, ttl)
+         cleared = await backend_instance.aclear(pattern)
+         if cleared > 0:
+             total_cleared += cleared
+             logger.debug(f'Cleared {cleared} entries from {backend} backend (ttl={ttl})')
+     else:
+         async with manager._get_async_lock():
+             for (pkg, btype, bttl), backend_instance in list(manager.backends.items()):
+                 if pkg != package:
+                     continue
+                 if btype not in backends_to_clear:
+                     continue
+                 if ttl is not None and bttl != ttl:
+                     continue
+
+                 cleared = await backend_instance.aclear(pattern)
+                 if cleared > 0:
+                     total_cleared += cleared
+                     logger.debug(f'Cleared {cleared} entries from {btype} backend (ttl={bttl})')
+
+     with manager._stats_lock:
+         manager.stats.clear()
+
+     return total_cleared
+
+
+ async def async_cache_info(fn: Callable[..., Any]) -> CacheInfo:
+     """Get cache statistics for an async decorated function.
+
+     Args:
+         fn: A function decorated with @cache
+
+     Returns
+         CacheInfo with hits, misses, and currsize
+
+     Raises
+         ValueError: If function is not decorated with @cache
+     """
+     _get_meta(fn, '@cache')
+     return await get_async_cache_info(fn)
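The new `async_*` helpers mirror the existing sync operations (`cache_get`, `cache_set`, `cache_delete`, `cache_clear`, `cache_info`). A hedged usage sketch follows; it assumes the `@cache` decorator is re-exported from the package top level, accepts a `ttl` argument, and wraps async functions, and `fetch_user()` is hypothetical.

```python
# Sketch only: exercising the async operations added in cachu/operations.py.
# The decorator's import path and its ttl argument are assumptions, not
# confirmed by this diff.
import asyncio

from cachu import cache  # assumed top-level re-export
from cachu.operations import async_cache_delete, async_cache_get, async_cache_set


@cache(ttl=300)  # illustrative decorator arguments
async def fetch_user(user_id: int) -> dict:
    return {'id': user_id}  # stand-in for a slow lookup


async def main() -> None:
    await fetch_user(user_id=1)                          # populate the cache
    hit = await async_cache_get(fetch_user, user_id=1)   # read without calling fn
    await async_cache_set(fetch_user, {'id': 1, 'v': 2}, user_id=1)  # overwrite
    await async_cache_delete(fetch_user, user_id=1)      # drop the entry
    miss = await async_cache_get(fetch_user, default=None, user_id=1)
    print(hit, miss)                                     # inspect results


asyncio.run(main())
```

Note that the key is built from the keyword arguments passed to the helper, so they should match how the decorated function is normally called.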
cachu-0.2.3.dist-info/METADATA → cachu-0.2.5.dist-info/METADATA RENAMED
@@ -1,13 +1,12 @@
  Metadata-Version: 2.4
  Name: cachu
- Version: 0.2.3
+ Version: 0.2.5
  Summary: Flexible caching library with sync and async support for memory, file (SQLite), and Redis backends
  Author: bissli
  License-Expression: 0BSD
  Project-URL: Repository, https://github.com/bissli/cachu.git
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
- Requires-Dist: dogpile.cache
  Requires-Dist: func-timeout
  Provides-Extra: async
  Requires-Dist: aiosqlite; extra == "async"
@@ -69,19 +68,17 @@ cachu.configure(
      key_prefix='v1:',                       # Prefix for all cache keys
      file_dir='/var/cache/app',              # Directory for file cache
      redis_url='redis://localhost:6379/0',   # Redis connection URL
-     redis_distributed=False,                # Use distributed locks for Redis
  )
  ```

  ### Configuration Options

- | Option              | Default                      | Description                                        |
- | ------------------- | ---------------------------- | -------------------------------------------------- |
- | `backend`           | `'memory'`                   | Default backend type                               |
- | `key_prefix`        | `''`                         | Prefix for all cache keys (useful for versioning)  |
- | `file_dir`          | `'/tmp'`                     | Directory for file-based caches                    |
- | `redis_url`         | `'redis://localhost:6379/0'` | Redis connection URL                               |
- | `redis_distributed` | `False`                      | Enable distributed locks for Redis                 |
+ | Option       | Default                      | Description                                        |
+ | ------------ | ---------------------------- | -------------------------------------------------- |
+ | `backend`    | `'memory'`                   | Default backend type                               |
+ | `key_prefix` | `''`                         | Prefix for all cache keys (useful for versioning)  |
+ | `file_dir`   | `'/tmp'`                     | Directory for file-based caches                    |
+ | `redis_url`  | `'redis://localhost:6379/0'` | Redis connection URL                               |

  ### Package Isolation

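A practical consequence of the metadata change above: 0.2.5 drops the dogpile.cache dependency and the `redis_distributed` option, with locking apparently moving to the new cachu/mutex.py module. A minimal `configure()` call against the remaining options, sketched from the README table (option names are from the table; the values are illustrative):

```python
import cachu

# 0.2.3 accepted redis_distributed=False here; in 0.2.5 that keyword is gone.
cachu.configure(
    backend='memory',                      # default backend type
    key_prefix='v1:',                      # prefix for all cache keys
    file_dir='/var/cache/app',             # directory for file cache
    redis_url='redis://localhost:6379/0',  # Redis connection URL
)
```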
cachu-0.2.5.dist-info/RECORD ADDED
@@ -0,0 +1,15 @@
+ cachu/__init__.py,sha256=b3uooLcOZo5mfJqrTKi1ldBe8gu9k2L3DK4L2B0ifxA,1156
+ cachu/config.py,sha256=u5Oqsy4bx3OMk3ImGDCzvO-U-HrRAxaC4dKHE4PUmQw,5909
+ cachu/decorator.py,sha256=YIO7n9gEpZNx4UHzWORI2H9mpENxGFnX_WfT9EOhQ8c,17724
+ cachu/keys.py,sha256=3em9_umQYFwwF2EwmIK8yWJq8aO2CI1DMRjq6Je3xC8,3747
+ cachu/mutex.py,sha256=SJpbKroS43UmuR_UlT9wPXhOnwDYdABo6iLkhrlVqMc,6464
+ cachu/operations.py,sha256=Z1zI5nhlEpM1diJQf7XHQ9cpH1HbIkdSizJV6NxLZPE,10216
+ cachu/types.py,sha256=FghBN5GhxnrpuT4WUL9iNnAfdoH__cw9_Ag4kHbIXq4,723
+ cachu/backends/__init__.py,sha256=hUGASS8kKMebf7O36RUrvpcSfi4KbcJPUwgqgccTbcw,3100
+ cachu/backends/memory.py,sha256=XWBoh4x3m6WiTAy4hrmm4NA6NkiTwlRxrAHxvVTG9mo,6473
+ cachu/backends/redis.py,sha256=kMVmb2EQJbc6zoSYuLUOFgxnSX_wgA1uiu_8mF0htV0,8307
+ cachu/backends/sqlite.py,sha256=RGI8Iycxi7kMAtQmjhIorsf5bLutcyr6Xw_i8iYK3JQ,18181
+ cachu-0.2.5.dist-info/METADATA,sha256=78331qx3dAztbUvFmB3WBZhKbi_3b7Qa0F8Kk85s4QE,11748
+ cachu-0.2.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ cachu-0.2.5.dist-info/top_level.txt,sha256=g80nNoMvLMzhSwQWV-JotCBqtsLAHeFMBo_g8hCK8hQ,6
+ cachu-0.2.5.dist-info/RECORD,,