cachu-0.2.3-py3-none-any.whl → cachu-0.2.4-py3-none-any.whl

This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
cachu/__init__.py CHANGED
@@ -1,19 +1,16 @@
  """Flexible caching library with support for memory, file, and Redis backends.
  """
- __version__ = '0.2.3'
+ __version__ = '0.2.4'

- from .async_decorator import async_cache, clear_async_backends
- from .async_decorator import get_async_backend, get_async_cache_info
- from .async_operations import async_cache_clear, async_cache_delete
- from .async_operations import async_cache_get, async_cache_info
- from .async_operations import async_cache_set
  from .backends import AsyncBackend, Backend
  from .backends.redis import get_redis_client
  from .config import configure, disable, enable, get_all_configs, get_config
  from .config import is_disabled
- from .decorator import cache, get_backend
- from .operations import cache_clear, cache_delete, cache_get, cache_info
- from .operations import cache_set
+ from .decorator import async_cache, cache, clear_async_backends
+ from .decorator import get_async_backend, get_async_cache_info, get_backend
+ from .operations import async_cache_clear, async_cache_delete, async_cache_get
+ from .operations import async_cache_info, async_cache_set, cache_clear
+ from .operations import cache_delete, cache_get, cache_info, cache_set

  __all__ = [
      'configure',
cachu/backends/__init__.py CHANGED
@@ -1,7 +1,7 @@
  """Cache backend implementations.
  """
  from abc import ABC, abstractmethod
- from collections.abc import Iterator
+ from collections.abc import AsyncIterator, Iterator
  from typing import Any

  NO_VALUE = object()
@@ -47,6 +47,49 @@ class Backend(ABC):
          """


- from .async_base import AsyncBackend
+ class AsyncBackend(ABC):
+     """Abstract base class for async cache backends.
+     """
+ 
+     @abstractmethod
+     async def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+ 
+     @abstractmethod
+     async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+ 
+     @abstractmethod
+     async def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+ 
+     @abstractmethod
+     async def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+ 
+     @abstractmethod
+     async def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+ 
+     @abstractmethod
+     async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+         """Iterate over keys matching pattern.
+         """
+ 
+     @abstractmethod
+     async def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+ 
+     @abstractmethod
+     async def close(self) -> None:
+         """Close the backend and release resources.
+         """
+ 

  __all__ = ['Backend', 'AsyncBackend', 'NO_VALUE']
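AsyncBackend replaces the old .async_base import with an ABC defined alongside Backend, so sync and async backends now share one module and one NO_VALUE sentinel. The interface can be consumed generically; the helper below is an illustrative sketch (not part of cachu) of a read-through pattern written only against the ABC methods shown above:

    from collections.abc import Awaitable, Callable
    from typing import Any

    from cachu.backends import NO_VALUE, AsyncBackend

    async def get_or_compute(backend: AsyncBackend, key: str,
                             compute: Callable[[], Awaitable[Any]], ttl: int) -> Any:
        # Hypothetical helper: read from any AsyncBackend, fall back to compute(), then cache.
        value = await backend.get(key)
        if value is NO_VALUE:
            value = await compute()
            await backend.set(key, value, ttl)
        return value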
cachu/backends/memory.py CHANGED
@@ -1,13 +1,14 @@
  """Memory cache backend implementation.
  """
+ import asyncio
  import fnmatch
  import pickle
  import threading
  import time
- from collections.abc import Iterator
+ from collections.abc import AsyncIterator, Iterator
  from typing import Any

- from . import NO_VALUE, Backend
+ from . import NO_VALUE, AsyncBackend, Backend


  class MemoryBackend(Backend):
@@ -100,3 +101,102 @@ class MemoryBackend(Backend):
          """Count keys matching pattern.
          """
          return sum(1 for _ in self.keys(pattern))
+ 
+ 
+ class AsyncMemoryBackend(AsyncBackend):
+     """Async in-memory cache backend using asyncio.Lock.
+     """
+ 
+     def __init__(self) -> None:
+         self._cache: dict[str, tuple[bytes, float, float]] = {}
+         self._lock = asyncio.Lock()
+ 
+     async def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found or expired.
+         """
+         async with self._lock:
+             entry = self._cache.get(key)
+             if entry is None:
+                 return NO_VALUE
+ 
+             pickled_value, created_at, expires_at = entry
+             if time.time() > expires_at:
+                 del self._cache[key]
+                 return NO_VALUE
+ 
+             return pickle.loads(pickled_value)
+ 
+     async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         async with self._lock:
+             entry = self._cache.get(key)
+             if entry is None:
+                 return NO_VALUE, None
+ 
+             pickled_value, created_at, expires_at = entry
+             if time.time() > expires_at:
+                 del self._cache[key]
+                 return NO_VALUE, None
+ 
+             return pickle.loads(pickled_value), created_at
+ 
+     async def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         now = time.time()
+         pickled_value = pickle.dumps(value)
+         async with self._lock:
+             self._cache[key] = (pickled_value, now, now + ttl)
+ 
+     async def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         async with self._lock:
+             self._cache.pop(key, None)
+ 
+     async def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         async with self._lock:
+             if pattern is None:
+                 count = len(self._cache)
+                 self._cache.clear()
+                 return count
+ 
+             keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
+             for key in keys_to_delete:
+                 del self._cache[key]
+             return len(keys_to_delete)
+ 
+     async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         now = time.time()
+         async with self._lock:
+             all_keys = list(self._cache.keys())
+ 
+         for key in all_keys:
+             async with self._lock:
+                 entry = self._cache.get(key)
+                 if entry is None:
+                     continue
+                 _, _, expires_at = entry
+                 if now > expires_at:
+                     del self._cache[key]
+                     continue
+ 
+             if pattern is None or fnmatch.fnmatch(key, pattern):
+                 yield key
+ 
+     async def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         count = 0
+         async for _ in self.keys(pattern):
+             count += 1
+         return count
+ 
+     async def close(self) -> None:
+         """Close the backend (no-op for memory backend).
+         """
cachu/backends/redis.py CHANGED
@@ -3,13 +3,14 @@
  import pickle
  import struct
  import time
- from collections.abc import Iterator
+ from collections.abc import AsyncIterator, Iterator
  from typing import TYPE_CHECKING, Any

- from . import NO_VALUE, Backend
+ from . import NO_VALUE, AsyncBackend, Backend

  if TYPE_CHECKING:
      import redis
+     import redis.asyncio as aioredis


  _METADATA_FORMAT = 'd'
@@ -28,6 +29,29 @@ def _get_redis_module() -> Any:
          ) from e


+ def _get_async_redis_module() -> Any:
+     """Import redis.asyncio module, raising helpful error if not installed.
+     """
+     try:
+         import redis.asyncio as aioredis
+         return aioredis
+     except ImportError as e:
+         raise RuntimeError(
+             "Async Redis support requires the 'redis' package (>=4.2.0). "
+             "Install with: pip install cachu[redis]"
+         ) from e
+ 
+ 
+ async def get_async_redis_client(url: str) -> 'aioredis.Redis':
+     """Create an async Redis client from URL.
+ 
+     Args:
+         url: Redis URL (e.g., 'redis://localhost:6379/0')
+     """
+     aioredis = _get_async_redis_module()
+     return aioredis.from_url(url)
+ 
+ 
  def get_redis_client(url: str) -> 'redis.Redis':
      """Create a Redis client from URL.

@@ -38,13 +62,28 @@ def get_redis_client(url: str) -> 'redis.Redis':
      return redis_module.from_url(url)


+ def _pack_value(value: Any, created_at: float) -> bytes:
+     """Pack value with creation timestamp.
+     """
+     metadata = struct.pack(_METADATA_FORMAT, created_at)
+     pickled = pickle.dumps(value)
+     return metadata + pickled
+ 
+ 
+ def _unpack_value(data: bytes) -> tuple[Any, float]:
+     """Unpack value and creation timestamp.
+     """
+     created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
+     value = pickle.loads(data[_METADATA_SIZE:])
+     return value, created_at
+ 
+ 
  class RedisBackend(Backend):
      """Redis cache backend.
      """

-     def __init__(self, url: str, distributed_lock: bool = False) -> None:
+     def __init__(self, url: str) -> None:
          self._url = url
-         self._distributed_lock = distributed_lock
          self._client: redis.Redis | None = None

      @property
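_pack_value and _unpack_value were previously RedisBackend methods; hoisting them to module level lets the new AsyncRedisBackend reuse the same framing: a struct-packed 'd' (an 8-byte float creation timestamp) followed by the pickled value. A standalone sketch of that framing, assuming _METADATA_SIZE is struct.calcsize(_METADATA_FORMAT):

    import pickle
    import struct
    import time

    _METADATA_FORMAT = 'd'                                # creation timestamp as a double
    _METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)    # 8 bytes

    created_at = time.time()
    packed = struct.pack(_METADATA_FORMAT, created_at) + pickle.dumps({'answer': 42})

    stored_at = struct.unpack(_METADATA_FORMAT, packed[:_METADATA_SIZE])[0]
    value = pickle.loads(packed[_METADATA_SIZE:])
    assert value == {'answer': 42} and stored_at == created_at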
@@ -55,27 +94,13 @@ class RedisBackend(Backend):
              self._client = get_redis_client(self._url)
          return self._client

-     def _pack_value(self, value: Any, created_at: float) -> bytes:
-         """Pack value with creation timestamp.
-         """
-         metadata = struct.pack(_METADATA_FORMAT, created_at)
-         pickled = pickle.dumps(value)
-         return metadata + pickled
- 
-     def _unpack_value(self, data: bytes) -> tuple[Any, float]:
-         """Unpack value and creation timestamp.
-         """
-         created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
-         value = pickle.loads(data[_METADATA_SIZE:])
-         return value, created_at
- 
      def get(self, key: str) -> Any:
          """Get value by key. Returns NO_VALUE if not found.
          """
          data = self.client.get(key)
          if data is None:
              return NO_VALUE
-         value, _ = self._unpack_value(data)
+         value, _ = _unpack_value(data)
          return value

      def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
@@ -84,14 +109,14 @@ class RedisBackend(Backend):
          data = self.client.get(key)
          if data is None:
              return NO_VALUE, None
-         value, created_at = self._unpack_value(data)
+         value, created_at = _unpack_value(data)
          return value, created_at

      def set(self, key: str, value: Any, ttl: int) -> None:
          """Set value with TTL in seconds.
          """
          now = time.time()
-         packed = self._pack_value(value, now)
+         packed = _pack_value(value, now)
          self.client.setex(key, ttl, packed)

      def delete(self, key: str) -> None:
@@ -129,3 +154,89 @@ class RedisBackend(Backend):
          if self._client is not None:
              self._client.close()
              self._client = None
+ 
+ 
+ class AsyncRedisBackend(AsyncBackend):
+     """Async Redis cache backend using redis.asyncio.
+     """
+ 
+     def __init__(self, url: str) -> None:
+         self._url = url
+         self._client: aioredis.Redis | None = None
+ 
+     async def _get_client(self) -> 'aioredis.Redis':
+         """Lazy-load async Redis client.
+         """
+         if self._client is None:
+             self._client = await get_async_redis_client(self._url)
+         return self._client
+ 
+     async def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+         client = await self._get_client()
+         data = await client.get(key)
+         if data is None:
+             return NO_VALUE
+         value, _ = _unpack_value(data)
+         return value
+ 
+     async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         client = await self._get_client()
+         data = await client.get(key)
+         if data is None:
+             return NO_VALUE, None
+         value, created_at = _unpack_value(data)
+         return value, created_at
+ 
+     async def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         client = await self._get_client()
+         now = time.time()
+         packed = _pack_value(value, now)
+         await client.setex(key, ttl, packed)
+ 
+     async def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         client = await self._get_client()
+         await client.delete(key)
+ 
+     async def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         client = await self._get_client()
+         if pattern is None:
+             pattern = '*'
+ 
+         count = 0
+         async for key in client.scan_iter(match=pattern):
+             await client.delete(key)
+             count += 1
+         return count
+ 
+     async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         client = await self._get_client()
+         redis_pattern = pattern or '*'
+         async for key in client.scan_iter(match=redis_pattern):
+             yield key.decode() if isinstance(key, bytes) else key
+ 
+     async def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         count = 0
+         async for _ in self.keys(pattern):
+             count += 1
+         return count
+ 
+     async def close(self) -> None:
+         """Close the Redis connection.
+         """
+         if self._client is not None:
+             await self._client.close()
+             self._client = None
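AsyncRedisBackend mirrors RedisBackend method for method on top of redis.asyncio, creating its client lazily on first use and reusing the shared _pack_value/_unpack_value framing. A usage sketch, assuming a reachable Redis server at the given URL and the redis>=4.2.0 dependency installed (pip install cachu[redis]):

    import asyncio

    from cachu.backends import NO_VALUE
    from cachu.backends.redis import AsyncRedisBackend

    async def main() -> None:
        backend = AsyncRedisBackend('redis://localhost:6379/0')
        await backend.set('greeting', 'hello', ttl=30)           # SETEX with timestamp-prefixed payload
        value, created_at = await backend.get_with_metadata('greeting')
        print(value, created_at)                                 # 'hello', unix timestamp
        await backend.clear('greet*')                            # SCAN, then DELETE per matching key
        assert await backend.get('greeting') is NO_VALUE
        await backend.close()

    asyncio.run(main())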