cachu 0.2.3-py3-none-any.whl → 0.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachu/__init__.py CHANGED
@@ -1,19 +1,16 @@
 """Flexible caching library with support for memory, file, and Redis backends.
 """
-__version__ = '0.2.3'
+__version__ = '0.2.5'
 
-from .async_decorator import async_cache, clear_async_backends
-from .async_decorator import get_async_backend, get_async_cache_info
-from .async_operations import async_cache_clear, async_cache_delete
-from .async_operations import async_cache_get, async_cache_info
-from .async_operations import async_cache_set
-from .backends import AsyncBackend, Backend
+from .backends import Backend
 from .backends.redis import get_redis_client
 from .config import configure, disable, enable, get_all_configs, get_config
 from .config import is_disabled
-from .decorator import cache, get_backend
-from .operations import cache_clear, cache_delete, cache_get, cache_info
-from .operations import cache_set
+from .decorator import aget_backend, cache, clear_async_backends
+from .decorator import get_async_backend, get_async_cache_info, get_backend
+from .operations import async_cache_clear, async_cache_delete, async_cache_get
+from .operations import async_cache_info, async_cache_set, cache_clear
+from .operations import cache_delete, cache_get, cache_info, cache_set
 
 __all__ = [
     'configure',
@@ -29,10 +26,9 @@ __all__ = [
     'cache_clear',
     'cache_info',
     'get_backend',
+    'aget_backend',
     'get_redis_client',
     'Backend',
-    'AsyncBackend',
-    'async_cache',
     'async_cache_get',
     'async_cache_set',
     'async_cache_delete',
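In 0.2.5 the separate async_decorator and async_operations modules are folded into decorator and operations, AsyncBackend and async_cache leave the public API, and aget_backend joins it. A sketch of the reshaped import surface, using only names visible in the imports and __all__ above (behavior beyond the names is not verified here):

from cachu import (
    cache,             # decorator, still the main entry point
    get_backend,       # sync backend accessor
    aget_backend,      # new in 0.2.5: async backend accessor
    cache_get,         # sync operations, from .operations
    async_cache_get,   # async operations now also live in .operations
)

# Removed from the public API in 0.2.5; these imports no longer work:
# from cachu import AsyncBackend, async_cache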
cachu/backends/__init__.py CHANGED
@@ -1,16 +1,21 @@
 """Cache backend implementations.
 """
 from abc import ABC, abstractmethod
-from collections.abc import Iterator
-from typing import Any
+from collections.abc import AsyncIterator, Iterator
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from ..mutex import AsyncCacheMutex, CacheMutex
 
 NO_VALUE = object()
 
 
 class Backend(ABC):
-    """Abstract base class for cache backends.
+    """Abstract base class for cache backends with both sync and async interfaces.
     """
 
+    # ===== Sync interface =====
+
     @abstractmethod
     def get(self, key: str) -> Any:
         """Get value by key. Returns NO_VALUE if not found.
@@ -46,7 +51,62 @@ class Backend(ABC):
         """Count keys matching pattern.
         """
 
+    @abstractmethod
+    def get_mutex(self, key: str) -> 'CacheMutex':
+        """Get a mutex for dogpile prevention on the given key.
+        """
+
+    # ===== Async interface =====
+
+    @abstractmethod
+    async def aget(self, key: str) -> Any:
+        """Async get value by key. Returns NO_VALUE if not found.
+        """
+
+    @abstractmethod
+    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Async get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+
+    @abstractmethod
+    async def aset(self, key: str, value: Any, ttl: int) -> None:
+        """Async set value with TTL in seconds.
+        """
+
+    @abstractmethod
+    async def adelete(self, key: str) -> None:
+        """Async delete value by key.
+        """
+
+    @abstractmethod
+    async def aclear(self, pattern: str | None = None) -> int:
+        """Async clear entries matching pattern. Returns count of cleared entries.
+        """
+
+    @abstractmethod
+    def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Async iterate over keys matching pattern.
+        """
+
+    @abstractmethod
+    async def acount(self, pattern: str | None = None) -> int:
+        """Async count keys matching pattern.
+        """
+
+    @abstractmethod
+    def get_async_mutex(self, key: str) -> 'AsyncCacheMutex':
+        """Get an async mutex for dogpile prevention on the given key.
+        """
+
+    # ===== Lifecycle =====
+
+    def close(self) -> None:
+        """Close the backend and release resources.
+        """
+
+    async def aclose(self) -> None:
+        """Async close the backend and release resources.
+        """
 
-from .async_base import AsyncBackend
 
-__all__ = ['Backend', 'AsyncBackend', 'NO_VALUE']
+__all__ = ['Backend', 'NO_VALUE']
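The AsyncBackend ABC and its async_base module are gone; the single Backend ABC now declares paired sync and async methods plus mutex hooks for dogpile prevention. The toy subclass below sketches the surface a third-party backend must now cover. It is hypothetical: the sync half mirrors the methods memory.py implements further down, and ThreadingMutex/AsyncioMutex are assumed importable from cachu.mutex, as the memory backend's own imports suggest.

import fnmatch
import time
from collections.abc import AsyncIterator, Iterator
from typing import Any

from cachu.backends import NO_VALUE, Backend
from cachu.mutex import AsyncioMutex, ThreadingMutex  # assumed import path


class DictBackend(Backend):
    """Toy backend storing key -> (value, created_at, expires_at)."""

    def __init__(self) -> None:
        self._data: dict[str, tuple[Any, float, float]] = {}

    # ----- sync half -----
    def get(self, key: str) -> Any:
        return self.get_with_metadata(key)[0]

    def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
        entry = self._data.get(key)
        if entry is None or time.time() > entry[2]:
            return NO_VALUE, None
        return entry[0], entry[1]

    def set(self, key: str, value: Any, ttl: int) -> None:
        now = time.time()
        self._data[key] = (value, now, now + ttl)

    def delete(self, key: str) -> None:
        self._data.pop(key, None)

    def clear(self, pattern: str | None = None) -> int:
        doomed = list(self.keys(pattern))
        for k in doomed:
            self._data.pop(k, None)
        return len(doomed)

    def keys(self, pattern: str | None = None) -> Iterator[str]:
        for k in list(self._data):
            if pattern is None or fnmatch.fnmatch(k, pattern):
                yield k

    def count(self, pattern: str | None = None) -> int:
        return sum(1 for _ in self.keys(pattern))

    def get_mutex(self, key: str) -> ThreadingMutex:
        return ThreadingMutex(f'dict:{key}')

    # ----- async half: a toy in-process store can delegate to the sync logic -----
    async def aget(self, key: str) -> Any:
        return self.get(key)

    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
        return self.get_with_metadata(key)

    async def aset(self, key: str, value: Any, ttl: int) -> None:
        self.set(key, value, ttl)

    async def adelete(self, key: str) -> None:
        self.delete(key)

    async def aclear(self, pattern: str | None = None) -> int:
        return self.clear(pattern)

    async def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
        for k in self.keys(pattern):
            yield k

    async def acount(self, pattern: str | None = None) -> int:
        return self.count(pattern)

    def get_async_mutex(self, key: str) -> AsyncioMutex:
        return AsyncioMutex(f'dict:{key}')

Note that close() and aclose() need no override here: the base class defines them as concrete no-ops.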
cachu/backends/memory.py CHANGED
@@ -1,102 +1,204 @@
 """Memory cache backend implementation.
 """
+import asyncio
 import fnmatch
 import pickle
 import threading
 import time
-from collections.abc import Iterator
+from collections.abc import AsyncIterator, Iterator
 from typing import Any
 
+from ..mutex import AsyncioMutex, CacheMutex, ThreadingMutex
 from . import NO_VALUE, Backend
 
 
 class MemoryBackend(Backend):
-    """Thread-safe in-memory cache backend.
+    """Thread-safe in-memory cache backend with both sync and async interfaces.
     """
 
     def __init__(self) -> None:
         self._cache: dict[str, tuple[bytes, float, float]] = {}
-        self._lock = threading.RLock()
+        self._sync_lock = threading.RLock()
+        self._async_lock: asyncio.Lock | None = None
 
-    def get(self, key: str) -> Any:
-        """Get value by key. Returns NO_VALUE if not found or expired.
+    def _get_async_lock(self) -> asyncio.Lock:
+        """Lazy-create async lock (must be called from async context).
         """
-        with self._lock:
-            entry = self._cache.get(key)
-            if entry is None:
-                return NO_VALUE
+        if self._async_lock is None:
+            self._async_lock = asyncio.Lock()
+        return self._async_lock
 
-            pickled_value, created_at, expires_at = entry
-            if time.time() > expires_at:
-                del self._cache[key]
-                return NO_VALUE
+    # ===== Core logic (no locking) =====
 
-            return pickle.loads(pickled_value)
+    def _do_get(self, key: str) -> tuple[Any, float | None]:
+        """Get value and metadata without locking.
+        """
+        entry = self._cache.get(key)
+        if entry is None:
+            return NO_VALUE, None
 
-    def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
-        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        pickled_value, created_at, expires_at = entry
+        if time.time() > expires_at:
+            del self._cache[key]
+            return NO_VALUE, None
+
+        return pickle.loads(pickled_value), created_at
+
+    def _do_set(self, key: str, value: Any, ttl: int) -> None:
+        """Set value without locking.
         """
-        with self._lock:
-            entry = self._cache.get(key)
-            if entry is None:
-                return NO_VALUE, None
+        now = time.time()
+        pickled_value = pickle.dumps(value)
+        self._cache[key] = (pickled_value, now, now + ttl)
+
+    def _do_delete(self, key: str) -> None:
+        """Delete value without locking.
+        """
+        self._cache.pop(key, None)
+
+    def _do_clear(self, pattern: str | None = None) -> int:
+        """Clear entries matching pattern without locking.
+        """
+        if pattern is None:
+            count = len(self._cache)
+            self._cache.clear()
+            return count
+
+        keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
+        for key in keys_to_delete:
+            del self._cache[key]
+        return len(keys_to_delete)
+
+    def _do_keys(self, pattern: str | None = None) -> list[str]:
+        """Get keys matching pattern without locking (returns snapshot).
+        """
+        now = time.time()
+        result = []
+        keys_to_delete = []
+
+        for key, entry in list(self._cache.items()):
+            _, _, expires_at = entry
+            if now > expires_at:
+                keys_to_delete.append(key)
+                continue
+            if pattern is None or fnmatch.fnmatch(key, pattern):
+                result.append(key)
+
+        for key in keys_to_delete:
+            self._cache.pop(key, None)
+
+        return result
 
-            pickled_value, created_at, expires_at = entry
-            if time.time() > expires_at:
-                del self._cache[key]
-                return NO_VALUE, None
+    # ===== Sync interface =====
 
-            return pickle.loads(pickled_value), created_at
+    def get(self, key: str) -> Any:
+        """Get value by key. Returns NO_VALUE if not found or expired.
+        """
+        with self._sync_lock:
+            value, _ = self._do_get(key)
+            return value
+
+    def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        with self._sync_lock:
+            return self._do_get(key)
 
     def set(self, key: str, value: Any, ttl: int) -> None:
         """Set value with TTL in seconds.
         """
-        now = time.time()
-        pickled_value = pickle.dumps(value)
-        with self._lock:
-            self._cache[key] = (pickled_value, now, now + ttl)
+        with self._sync_lock:
+            self._do_set(key, value, ttl)
 
     def delete(self, key: str) -> None:
         """Delete value by key.
         """
-        with self._lock:
-            self._cache.pop(key, None)
+        with self._sync_lock:
+            self._do_delete(key)
 
     def clear(self, pattern: str | None = None) -> int:
         """Clear entries matching pattern. Returns count of cleared entries.
         """
-        with self._lock:
-            if pattern is None:
-                count = len(self._cache)
-                self._cache.clear()
-                return count
-
-            keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
-            for key in keys_to_delete:
-                del self._cache[key]
-            return len(keys_to_delete)
+        with self._sync_lock:
+            return self._do_clear(pattern)
 
     def keys(self, pattern: str | None = None) -> Iterator[str]:
         """Iterate over keys matching pattern.
         """
-        now = time.time()
-        with self._lock:
-            all_keys = list(self._cache.keys())
+        with self._sync_lock:
+            all_keys = self._do_keys(pattern)
+        yield from all_keys
+
+    def count(self, pattern: str | None = None) -> int:
+        """Count keys matching pattern.
+        """
+        with self._sync_lock:
+            return len(self._do_keys(pattern))
+
+    def get_mutex(self, key: str) -> CacheMutex:
+        """Get a mutex for dogpile prevention on the given key.
+        """
+        return ThreadingMutex(f'memory:{key}')
+
+    # ===== Async interface =====
+
+    async def aget(self, key: str) -> Any:
+        """Async get value by key. Returns NO_VALUE if not found or expired.
+        """
+        async with self._get_async_lock():
+            value, _ = self._do_get(key)
+            return value
+
+    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Async get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        async with self._get_async_lock():
+            return self._do_get(key)
+
+    async def aset(self, key: str, value: Any, ttl: int) -> None:
+        """Async set value with TTL in seconds.
+        """
+        async with self._get_async_lock():
+            self._do_set(key, value, ttl)
+
+    async def adelete(self, key: str) -> None:
+        """Async delete value by key.
+        """
+        async with self._get_async_lock():
+            self._do_delete(key)
+
+    async def aclear(self, pattern: str | None = None) -> int:
+        """Async clear entries matching pattern. Returns count of cleared entries.
+        """
+        async with self._get_async_lock():
+            return self._do_clear(pattern)
+
+    async def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Async iterate over keys matching pattern.
+        """
+        async with self._get_async_lock():
+            all_keys = self._do_keys(pattern)
 
         for key in all_keys:
-            with self._lock:
-                entry = self._cache.get(key)
-                if entry is None:
-                    continue
-                _, _, expires_at = entry
-                if now > expires_at:
-                    del self._cache[key]
-                    continue
+            yield key
 
-            if pattern is None or fnmatch.fnmatch(key, pattern):
-                yield key
+    async def acount(self, pattern: str | None = None) -> int:
+        """Async count keys matching pattern.
+        """
+        async with self._get_async_lock():
+            return len(self._do_keys(pattern))
 
-    def count(self, pattern: str | None = None) -> int:
-        """Count keys matching pattern.
+    def get_async_mutex(self, key: str) -> AsyncioMutex:
+        """Get an async mutex for dogpile prevention on the given key.
+        """
+        return AsyncioMutex(f'memory:{key}')
+
+    # ===== Lifecycle =====
+
+    def close(self) -> None:
+        """Close the backend (no-op for memory backend).
+        """
+
+    async def aclose(self) -> None:
+        """Async close the backend (no-op for memory backend).
         """
-        return sum(1 for _ in self.keys(pattern))
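The rewrite factors the expiry and pattern logic into lock-free _do_* primitives and wraps them twice: sync methods take the threading.RLock, async methods a lazily created asyncio.Lock (created on first use so it binds to the running event loop). A short usage sketch of the dual interface; method names and signatures are those in the diff, and the asserted results assume a fresh backend:

import asyncio

from cachu.backends.memory import MemoryBackend

backend = MemoryBackend()

# Sync path, serialized by the threading.RLock.
backend.set('user:1', {'name': 'Ada'}, ttl=60)
assert backend.get('user:1') == {'name': 'Ada'}
assert backend.count('user:*') == 1


async def main() -> None:
    # Async path over the same store, serialized by the asyncio.Lock.
    await backend.aset('user:2', {'name': 'Grace'}, ttl=60)
    assert await backend.aget('user:2') == {'name': 'Grace'}
    assert [k async for k in backend.akeys('user:*')] == ['user:1', 'user:2']


asyncio.run(main())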
cachu/backends/redis.py CHANGED
@@ -3,13 +3,15 @@
 import pickle
 import struct
 import time
-from collections.abc import Iterator
+from collections.abc import AsyncIterator, Iterator
 from typing import TYPE_CHECKING, Any
 
+from ..mutex import AsyncCacheMutex, AsyncRedisMutex, CacheMutex, RedisMutex
 from . import NO_VALUE, Backend
 
 if TYPE_CHECKING:
     import redis
+    import redis.asyncio as aioredis
 
 
 _METADATA_FORMAT = 'd'
@@ -24,7 +26,20 @@ def _get_redis_module() -> Any:
         return redis
     except ImportError as e:
         raise RuntimeError(
-            "Redis support requires the 'redis' package. Install with: pip install cache[redis]"
+            "Redis support requires the 'redis' package. Install with: pip install cachu[redis]"
+        ) from e
+
+
+def _get_async_redis_module() -> Any:
+    """Import redis.asyncio module, raising helpful error if not installed.
+    """
+    try:
+        import redis.asyncio as aioredis
+        return aioredis
+    except ImportError as e:
+        raise RuntimeError(
+            "Async Redis support requires the 'redis' package (>=4.2.0). "
+            "Install with: pip install cachu[redis]"
         ) from e
 
 
@@ -38,36 +53,49 @@ def get_redis_client(url: str) -> 'redis.Redis':
     return redis_module.from_url(url)
 
 
+def _pack_value(value: Any, created_at: float) -> bytes:
+    """Pack value with creation timestamp.
+    """
+    metadata = struct.pack(_METADATA_FORMAT, created_at)
+    pickled = pickle.dumps(value)
+    return metadata + pickled
+
+
+def _unpack_value(data: bytes) -> tuple[Any, float]:
+    """Unpack value and creation timestamp.
+    """
+    created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
+    value = pickle.loads(data[_METADATA_SIZE:])
+    return value, created_at
+
+
 class RedisBackend(Backend):
-    """Redis cache backend.
+    """Unified Redis cache backend with both sync and async interfaces.
     """
 
-    def __init__(self, url: str, distributed_lock: bool = False) -> None:
+    def __init__(self, url: str, lock_timeout: float = 10.0) -> None:
         self._url = url
-        self._distributed_lock = distributed_lock
-        self._client: redis.Redis | None = None
+        self._lock_timeout = lock_timeout
+        self._sync_client: redis.Redis | None = None
+        self._async_client: aioredis.Redis | None = None
 
     @property
     def client(self) -> 'redis.Redis':
-        """Lazy-load Redis client.
+        """Lazy-load sync Redis client.
         """
-        if self._client is None:
-            self._client = get_redis_client(self._url)
-        return self._client
+        if self._sync_client is None:
+            self._sync_client = get_redis_client(self._url)
+        return self._sync_client
 
-    def _pack_value(self, value: Any, created_at: float) -> bytes:
-        """Pack value with creation timestamp.
+    def _get_async_client(self) -> 'aioredis.Redis':
+        """Lazy-load async Redis client (from_url is NOT async).
         """
-        metadata = struct.pack(_METADATA_FORMAT, created_at)
-        pickled = pickle.dumps(value)
-        return metadata + pickled
+        if self._async_client is None:
+            aioredis = _get_async_redis_module()
+            self._async_client = aioredis.from_url(self._url)
+        return self._async_client
 
-    def _unpack_value(self, data: bytes) -> tuple[Any, float]:
-        """Unpack value and creation timestamp.
-        """
-        created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
-        value = pickle.loads(data[_METADATA_SIZE:])
-        return value, created_at
+    # ===== Sync interface =====
 
     def get(self, key: str) -> Any:
         """Get value by key. Returns NO_VALUE if not found.
@@ -75,7 +103,7 @@ class RedisBackend(Backend):
         data = self.client.get(key)
         if data is None:
             return NO_VALUE
-        value, _ = self._unpack_value(data)
+        value, _ = _unpack_value(data)
         return value
 
     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
@@ -84,14 +112,14 @@ class RedisBackend(Backend):
         data = self.client.get(key)
         if data is None:
             return NO_VALUE, None
-        value, created_at = self._unpack_value(data)
+        value, created_at = _unpack_value(data)
         return value, created_at
 
     def set(self, key: str, value: Any, ttl: int) -> None:
         """Set value with TTL in seconds.
         """
         now = time.time()
-        packed = self._pack_value(value, now)
+        packed = _pack_value(value, now)
         self.client.setex(key, ttl, packed)
 
     def delete(self, key: str) -> None:
@@ -123,9 +151,113 @@ class RedisBackend(Backend):
         """
         return sum(1 for _ in self.keys(pattern))
 
+    def get_mutex(self, key: str) -> CacheMutex:
+        """Get a mutex for dogpile prevention on the given key.
+        """
+        return RedisMutex(self.client, f'lock:{key}', self._lock_timeout)
+
+    # ===== Async interface =====
+
+    async def aget(self, key: str) -> Any:
+        """Async get value by key. Returns NO_VALUE if not found.
+        """
+        client = self._get_async_client()
+        data = await client.get(key)
+        if data is None:
+            return NO_VALUE
+        value, _ = _unpack_value(data)
+        return value
+
+    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Async get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        client = self._get_async_client()
+        data = await client.get(key)
+        if data is None:
+            return NO_VALUE, None
+        value, created_at = _unpack_value(data)
+        return value, created_at
+
+    async def aset(self, key: str, value: Any, ttl: int) -> None:
+        """Async set value with TTL in seconds.
+        """
+        client = self._get_async_client()
+        now = time.time()
+        packed = _pack_value(value, now)
+        await client.setex(key, ttl, packed)
+
+    async def adelete(self, key: str) -> None:
+        """Async delete value by key.
+        """
+        client = self._get_async_client()
+        await client.delete(key)
+
+    async def aclear(self, pattern: str | None = None) -> int:
+        """Async clear entries matching pattern. Returns count of cleared entries.
+        """
+        client = self._get_async_client()
+        if pattern is None:
+            pattern = '*'
+
+        count = 0
+        async for key in client.scan_iter(match=pattern):
+            await client.delete(key)
+            count += 1
+        return count
+
+    async def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Async iterate over keys matching pattern.
+        """
+        client = self._get_async_client()
+        redis_pattern = pattern or '*'
+        async for key in client.scan_iter(match=redis_pattern):
+            yield key.decode() if isinstance(key, bytes) else key
+
+    async def acount(self, pattern: str | None = None) -> int:
+        """Async count keys matching pattern.
+        """
+        count = 0
+        async for _ in self.akeys(pattern):
+            count += 1
+        return count
+
+    def get_async_mutex(self, key: str) -> AsyncCacheMutex:
+        """Get an async mutex for dogpile prevention on the given key.
+        """
+        return AsyncRedisMutex(self._get_async_client(), f'lock:{key}', self._lock_timeout)
+
+    # ===== Lifecycle =====
+
+    def _close_sync_client(self) -> None:
+        """Close sync client if open.
+        """
+        if self._sync_client is not None:
+            client = self._sync_client
+            self._sync_client = None
+            client.close()
+
+    def _close_async_client_sync(self) -> None:
+        """Forcefully close async client from sync context.
+        """
+        if self._async_client is not None:
+            client = self._async_client
+            self._async_client = None
+            try:
+                client.close()
+            except Exception:
+                pass
+
     def close(self) -> None:
-        """Close the Redis connection.
+        """Close all backend resources from sync context.
+        """
+        self._close_sync_client()
+        self._close_async_client_sync()
+
+    async def aclose(self) -> None:
+        """Close all backend resources from async context.
         """
-        if self._client is not None:
-            self._client.close()
-            self._client = None
+        if self._async_client is not None:
+            client = self._async_client
+            self._async_client = None
+            await client.aclose()
+        self._close_sync_client()
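The unified backend keeps one lazily created client per world: the client property for sync callers, and a redis.asyncio client built by _get_async_client() for async callers. close() tears down both from sync code; aclose() awaits the async client's aclose() first, then closes the sync one. A usage sketch; the URL is illustrative and a reachable Redis server is assumed:

import asyncio

from cachu.backends.redis import RedisBackend

backend = RedisBackend('redis://localhost:6379/0', lock_timeout=10.0)

# Sync path: first call creates the redis.Redis client.
backend.set('greeting', 'hello', ttl=30)
assert backend.get('greeting') == 'hello'


async def main() -> None:
    # Async path: first call creates the separate redis.asyncio client.
    assert await backend.aget('greeting') == 'hello'
    assert await backend.acount('greet*') >= 1
    await backend.aclose()  # closes the async client, then the sync one


asyncio.run(main())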