cachu-0.2.4-py3-none-any.whl → cachu-0.2.5-py3-none-any.whl
This diff compares the content of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- cachu/__init__.py +4 -5
- cachu/backends/__init__.py +38 -21
- cachu/backends/memory.py +137 -135
- cachu/backends/redis.py +86 -65
- cachu/backends/sqlite.py +163 -123
- cachu/config.py +6 -0
- cachu/decorator.py +257 -275
- cachu/mutex.py +247 -0
- cachu/operations.py +27 -28
- {cachu-0.2.4.dist-info → cachu-0.2.5.dist-info}/METADATA +1 -2
- cachu-0.2.5.dist-info/RECORD +15 -0
- cachu/async_decorator.py +0 -262
- cachu/async_operations.py +0 -178
- cachu/backends/async_base.py +0 -50
- cachu/backends/async_memory.py +0 -111
- cachu/backends/async_redis.py +0 -141
- cachu/backends/async_sqlite.py +0 -256
- cachu/backends/file.py +0 -10
- cachu-0.2.4.dist-info/RECORD +0 -21
- {cachu-0.2.4.dist-info → cachu-0.2.5.dist-info}/WHEEL +0 -0
- {cachu-0.2.4.dist-info → cachu-0.2.5.dist-info}/top_level.txt +0 -0
cachu/__init__.py
CHANGED

@@ -1,12 +1,12 @@
 """Flexible caching library with support for memory, file, and Redis backends.
 """
-__version__ = '0.2.4'
+__version__ = '0.2.5'
 
-from .backends import
+from .backends import Backend
 from .backends.redis import get_redis_client
 from .config import configure, disable, enable, get_all_configs, get_config
 from .config import is_disabled
-from .decorator import
+from .decorator import aget_backend, cache, clear_async_backends
 from .decorator import get_async_backend, get_async_cache_info, get_backend
 from .operations import async_cache_clear, async_cache_delete, async_cache_get
 from .operations import async_cache_info, async_cache_set, cache_clear
@@ -26,10 +26,9 @@ __all__ = [
     'cache_clear',
     'cache_info',
     'get_backend',
+    'aget_backend',
     'get_redis_client',
     'Backend',
-    'AsyncBackend',
-    'async_cache',
     'async_cache_get',
     'async_cache_set',
     'async_cache_delete',
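The top-level API now exports a single `cache` decorator plus `aget_backend`, while `AsyncBackend` and `async_cache` are dropped from `__all__`. A hypothetical sketch of the 0.2.5 surface based only on the names visible above; the `ttl=` parameter and the unified decorator's ability to wrap async functions directly are assumptions inferred from this diff, not confirmed by it:

```python
# Hypothetical sketch of the 0.2.5 top-level API.
# ASSUMPTIONS: the decorator takes a `ttl=` argument and, since async_cache
# was removed, also wraps coroutine functions.
import asyncio

from cachu import cache


@cache(ttl=60)          # assumed parameter name
def load_report(report_id: int) -> dict:
    return {'id': report_id}


@cache(ttl=60)          # one decorator for both sync and async callables (assumed)
async def load_report_async(report_id: int) -> dict:
    return {'id': report_id}


print(load_report(1))
print(asyncio.run(load_report_async(1)))
```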
cachu/backends/__init__.py
CHANGED

@@ -2,15 +2,20 @@
 """
 from abc import ABC, abstractmethod
 from collections.abc import AsyncIterator, Iterator
-from typing import Any
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from ..mutex import AsyncCacheMutex, CacheMutex
 
 NO_VALUE = object()
 
 
 class Backend(ABC):
-    """Abstract base class for cache backends.
+    """Abstract base class for cache backends with both sync and async interfaces.
     """
 
+    # ===== Sync interface =====
+
     @abstractmethod
     def get(self, key: str) -> Any:
         """Get value by key. Returns NO_VALUE if not found.
@@ -46,50 +51,62 @@ class Backend(ABC):
         """Count keys matching pattern.
         """
 
+    @abstractmethod
+    def get_mutex(self, key: str) -> 'CacheMutex':
+        """Get a mutex for dogpile prevention on the given key.
+        """
 
-
-    """Abstract base class for async cache backends.
-    """
+    # ===== Async interface =====
 
     @abstractmethod
-    async def
-        """
+    async def aget(self, key: str) -> Any:
+        """Async get value by key. Returns NO_VALUE if not found.
         """
 
     @abstractmethod
-    async def
-        """
+    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Async get value and creation timestamp. Returns (NO_VALUE, None) if not found.
         """
 
     @abstractmethod
-    async def
-        """
+    async def aset(self, key: str, value: Any, ttl: int) -> None:
+        """Async set value with TTL in seconds.
         """
 
     @abstractmethod
-    async def
-        """
+    async def adelete(self, key: str) -> None:
+        """Async delete value by key.
        """
 
     @abstractmethod
-    async def
-        """
+    async def aclear(self, pattern: str | None = None) -> int:
+        """Async clear entries matching pattern. Returns count of cleared entries.
        """
 
     @abstractmethod
-
-        """
+    def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Async iterate over keys matching pattern.
         """
 
     @abstractmethod
-    async def
-        """
+    async def acount(self, pattern: str | None = None) -> int:
+        """Async count keys matching pattern.
        """
 
     @abstractmethod
-
+    def get_async_mutex(self, key: str) -> 'AsyncCacheMutex':
+        """Get an async mutex for dogpile prevention on the given key.
+        """
+
+    # ===== Lifecycle =====
+
+    def close(self) -> None:
         """Close the backend and release resources.
         """
 
+    async def aclose(self) -> None:
+        """Async close the backend and release resources.
+        """
+
 
-__all__ = ['Backend', '
+__all__ = ['Backend', 'NO_VALUE']
cachu/backends/memory.py
CHANGED

@@ -8,195 +8,197 @@ import time
 from collections.abc import AsyncIterator, Iterator
 from typing import Any
 
-from
+from ..mutex import AsyncioMutex, CacheMutex, ThreadingMutex
+from . import NO_VALUE, Backend
 
 
 class MemoryBackend(Backend):
-    """Thread-safe in-memory cache backend.
+    """Thread-safe in-memory cache backend with both sync and async interfaces.
     """
 
     def __init__(self) -> None:
         self._cache: dict[str, tuple[bytes, float, float]] = {}
-        self.
+        self._sync_lock = threading.RLock()
+        self._async_lock: asyncio.Lock | None = None
 
-    def
-        """
+    def _get_async_lock(self) -> asyncio.Lock:
+        """Lazy-create async lock (must be called from async context).
         """
-
-
-
-            return NO_VALUE
+        if self._async_lock is None:
+            self._async_lock = asyncio.Lock()
+        return self._async_lock
 
-
-        if time.time() > expires_at:
-            del self._cache[key]
-            return NO_VALUE
+    # ===== Core logic (no locking) =====
 
-
+    def _do_get(self, key: str) -> tuple[Any, float | None]:
+        """Get value and metadata without locking.
+        """
+        entry = self._cache.get(key)
+        if entry is None:
+            return NO_VALUE, None
 
-
-
+        pickled_value, created_at, expires_at = entry
+        if time.time() > expires_at:
+            del self._cache[key]
+            return NO_VALUE, None
+
+        return pickle.loads(pickled_value), created_at
+
+    def _do_set(self, key: str, value: Any, ttl: int) -> None:
+        """Set value without locking.
+        """
+        now = time.time()
+        pickled_value = pickle.dumps(value)
+        self._cache[key] = (pickled_value, now, now + ttl)
+
+    def _do_delete(self, key: str) -> None:
+        """Delete value without locking.
         """
-
-        entry = self._cache.get(key)
-        if entry is None:
-            return NO_VALUE, None
+        self._cache.pop(key, None)
 
-
-
-
-
+    def _do_clear(self, pattern: str | None = None) -> int:
+        """Clear entries matching pattern without locking.
+        """
+        if pattern is None:
+            count = len(self._cache)
+            self._cache.clear()
+            return count
+
+        keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
+        for key in keys_to_delete:
+            del self._cache[key]
+        return len(keys_to_delete)
+
+    def _do_keys(self, pattern: str | None = None) -> list[str]:
+        """Get keys matching pattern without locking (returns snapshot).
+        """
+        now = time.time()
+        result = []
+        keys_to_delete = []
+
+        for key, entry in list(self._cache.items()):
+            _, _, expires_at = entry
+            if now > expires_at:
+                keys_to_delete.append(key)
+                continue
+            if pattern is None or fnmatch.fnmatch(key, pattern):
+                result.append(key)
 
-
+        for key in keys_to_delete:
+            self._cache.pop(key, None)
+
+        return result
+
+    # ===== Sync interface =====
+
+    def get(self, key: str) -> Any:
+        """Get value by key. Returns NO_VALUE if not found or expired.
+        """
+        with self._sync_lock:
+            value, _ = self._do_get(key)
+            return value
+
+    def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        with self._sync_lock:
+            return self._do_get(key)
 
     def set(self, key: str, value: Any, ttl: int) -> None:
         """Set value with TTL in seconds.
         """
-
-
-        with self._lock:
-            self._cache[key] = (pickled_value, now, now + ttl)
+        with self._sync_lock:
+            self._do_set(key, value, ttl)
 
     def delete(self, key: str) -> None:
         """Delete value by key.
         """
-        with self.
-            self.
+        with self._sync_lock:
+            self._do_delete(key)
 
     def clear(self, pattern: str | None = None) -> int:
         """Clear entries matching pattern. Returns count of cleared entries.
         """
-        with self.
-
-            count = len(self._cache)
-            self._cache.clear()
-            return count
-
-        keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
-        for key in keys_to_delete:
-            del self._cache[key]
-        return len(keys_to_delete)
+        with self._sync_lock:
+            return self._do_clear(pattern)
 
     def keys(self, pattern: str | None = None) -> Iterator[str]:
         """Iterate over keys matching pattern.
         """
-
-
-
-
-        for key in all_keys:
-            with self._lock:
-                entry = self._cache.get(key)
-                if entry is None:
-                    continue
-                _, _, expires_at = entry
-                if now > expires_at:
-                    del self._cache[key]
-                    continue
-
-            if pattern is None or fnmatch.fnmatch(key, pattern):
-                yield key
+        with self._sync_lock:
+            all_keys = self._do_keys(pattern)
+        yield from all_keys
 
     def count(self, pattern: str | None = None) -> int:
         """Count keys matching pattern.
         """
-
+        with self._sync_lock:
+            return len(self._do_keys(pattern))
 
-
-
-    """Async in-memory cache backend using asyncio.Lock.
-    """
-
-    def __init__(self) -> None:
-        self._cache: dict[str, tuple[bytes, float, float]] = {}
-        self._lock = asyncio.Lock()
-
-    async def get(self, key: str) -> Any:
-        """Get value by key. Returns NO_VALUE if not found or expired.
+    def get_mutex(self, key: str) -> CacheMutex:
+        """Get a mutex for dogpile prevention on the given key.
         """
-
-        entry = self._cache.get(key)
-        if entry is None:
-            return NO_VALUE
-
-        pickled_value, created_at, expires_at = entry
-        if time.time() > expires_at:
-            del self._cache[key]
-            return NO_VALUE
+        return ThreadingMutex(f'memory:{key}')
 
-
+    # ===== Async interface =====
 
-    async def
-        """
+    async def aget(self, key: str) -> Any:
+        """Async get value by key. Returns NO_VALUE if not found or expired.
         """
-        async with self.
-
-
-            return NO_VALUE, None
-
-        pickled_value, created_at, expires_at = entry
-        if time.time() > expires_at:
-            del self._cache[key]
-            return NO_VALUE, None
-
-        return pickle.loads(pickled_value), created_at
+        async with self._get_async_lock():
+            value, _ = self._do_get(key)
+            return value
 
-    async def
-        """
+    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Async get value and creation timestamp. Returns (NO_VALUE, None) if not found.
         """
-
-
-        async with self._lock:
-            self._cache[key] = (pickled_value, now, now + ttl)
+        async with self._get_async_lock():
+            return self._do_get(key)
 
-    async def
-        """
+    async def aset(self, key: str, value: Any, ttl: int) -> None:
+        """Async set value with TTL in seconds.
         """
-        async with self.
-            self.
+        async with self._get_async_lock():
+            self._do_set(key, value, ttl)
 
-    async def
-        """
+    async def adelete(self, key: str) -> None:
+        """Async delete value by key.
         """
-        async with self.
-
-            count = len(self._cache)
-            self._cache.clear()
-            return count
+        async with self._get_async_lock():
+            self._do_delete(key)
 
-
-
-
-
+    async def aclear(self, pattern: str | None = None) -> int:
+        """Async clear entries matching pattern. Returns count of cleared entries.
+        """
+        async with self._get_async_lock():
+            return self._do_clear(pattern)
 
-    async def
-        """
+    async def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Async iterate over keys matching pattern.
         """
-
-
-        all_keys = list(self._cache.keys())
+        async with self._get_async_lock():
+            all_keys = self._do_keys(pattern)
 
         for key in all_keys:
-
-            entry = self._cache.get(key)
-            if entry is None:
-                continue
-            _, _, expires_at = entry
-            if now > expires_at:
-                del self._cache[key]
-                continue
+            yield key
 
-
-
+    async def acount(self, pattern: str | None = None) -> int:
+        """Async count keys matching pattern.
+        """
+        async with self._get_async_lock():
+            return len(self._do_keys(pattern))
 
-
-        """
+    def get_async_mutex(self, key: str) -> AsyncioMutex:
+        """Get an async mutex for dogpile prevention on the given key.
         """
-
-        async for _ in self.keys(pattern):
-            count += 1
-        return count
+        return AsyncioMutex(f'memory:{key}')
 
-
+    # ===== Lifecycle =====
+
+    def close(self) -> None:
         """Close the backend (no-op for memory backend).
         """
+
+    async def aclose(self) -> None:
+        """Async close the backend (no-op for memory backend).
+        """