cachu 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachu/__init__.py +1 -1
- cachu/async_decorator.py +2 -1
- cachu/backends/async_sqlite.py +88 -92
- cachu/decorator.py +2 -2
- {cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/METADATA +1 -1
- {cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/RECORD +8 -9
- cachu/cache.py +0 -636
- {cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/WHEEL +0 -0
- {cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/top_level.txt +0 -0
cachu/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 """Flexible caching library with support for memory, file, and Redis backends.
 """
-__version__ = '0.2.0'
+__version__ = '0.2.2'

 from .async_decorator import async_cache, clear_async_backends
 from .async_decorator import get_async_backend, get_async_cache_info
cachu/async_decorator.py
CHANGED
@@ -66,7 +66,8 @@ async def _get_async_backend(package: str | None, backend_type: str, ttl: int) -> AsyncBackend:
 async def get_async_backend(
     backend_type: str | None = None,
     package: str | None = None,
-    ttl: int = …,
+    *,
+    ttl: int,
 ) -> AsyncBackend:
     """Get an async backend instance.

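As of 0.2.2, ttl is keyword-only and no longer has a default, so call sites must name it. A minimal usage sketch of the new signature (the 'memory' backend type and the set/get round trip are assumptions based on the backend modules listed in RECORD, not on code shown in this diff):

    import asyncio
    from cachu import get_async_backend

    async def main() -> None:
        backend = await get_async_backend('memory', ttl=300)   # ttl must be named
        await backend.set('greeting', 'hello', ttl=300)
        print(await backend.get('greeting'))                   # 'hello'

    asyncio.run(main())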
cachu/backends/async_sqlite.py
CHANGED
@@ -33,91 +33,89 @@ class AsyncSqliteBackend(AsyncBackend):
     def __init__(self, filepath: str) -> None:
         self._filepath = filepath
         self._connection: aiosqlite.Connection | None = None
-        self.…
+        self._init_lock = asyncio.Lock()
+        self._write_lock = asyncio.Lock()
         self._initialized = False

     async def _ensure_initialized(self) -> 'aiosqlite.Connection':
         """Ensure database is initialized and return connection.
         """
-        …
+        async with self._init_lock:
+            if self._connection is None:
+                aiosqlite = _get_aiosqlite_module()
+                self._connection = await aiosqlite.connect(self._filepath)
+                await self._connection.execute('PRAGMA journal_mode=WAL')
+                await self._connection.execute('PRAGMA busy_timeout=5000')
+
+            if not self._initialized:
+                await self._connection.execute('''
+                    CREATE TABLE IF NOT EXISTS cache (
+                        key TEXT PRIMARY KEY,
+                        value BLOB NOT NULL,
+                        created_at REAL NOT NULL,
+                        expires_at REAL NOT NULL
+                    )
+                ''')
+                await self._connection.execute('''
+                    CREATE INDEX IF NOT EXISTS idx_cache_expires
+                    ON cache(expires_at)
+                ''')
+                await self._connection.commit()
+                self._initialized = True

         return self._connection
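The deleted attribute's name is truncated in this view, but 0.2.2 clearly splits locking into a dedicated _init_lock and _write_lock: the init lock makes lazy setup safe under concurrency, and WAL journaling plus a 5-second busy timeout make the file friendlier to concurrent access. A standalone sketch of the same double-checked async initialization pattern (illustrative names, not cachu's API):

    import asyncio

    class LazyResource:
        def __init__(self) -> None:
            self._resource: object | None = None
            self._init_lock = asyncio.Lock()

        async def ensure(self) -> object:
            # Without the lock, two tasks that both observe None here
            # would run the expensive setup twice.
            async with self._init_lock:
                if self._resource is None:
                    await asyncio.sleep(0)  # stand-in for async setup work
                    self._resource = object()
            return self._resource

    async def main() -> None:
        r = LazyResource()
        a, b = await asyncio.gather(r.ensure(), r.ensure())
        assert a is b  # both callers receive the same instance

    asyncio.run(main())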

     async def get(self, key: str) -> Any:
         """Get value by key. Returns NO_VALUE if not found or expired.
         """
-        …
-        row = await cursor.fetchone()
-        …
-        if row is None:
-            return NO_VALUE
+        try:
+            conn = await self._ensure_initialized()
+            cursor = await conn.execute(
+                'SELECT value, expires_at FROM cache WHERE key = ?',
+                (key,),
+            )
+            row = await cursor.fetchone()

-        …
-            await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
-            await conn.commit()
-            return NO_VALUE
+            if row is None:
+                return NO_VALUE

-        …
+            value_blob, expires_at = row
+            if time.time() > expires_at:
                 return NO_VALUE

+            return pickle.loads(value_blob)
+        except Exception:
+            return NO_VALUE
+
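Two behavioral changes are visible here: the old get() deleted expired rows inline (DELETE plus commit on the read path), while the new one simply reports a miss and leaves expired rows for a separate cleanup path, and the whole lookup is now wrapped in a broad try/except that degrades any storage error to a miss. Misses are signalled with the NO_VALUE sentinel rather than None, so a cached None stays distinguishable. A hedged round-trip sketch (constructor and method signatures as in this diff; aiosqlite must be installed):

    import asyncio
    from cachu.backends import NO_VALUE
    from cachu.backends.async_sqlite import AsyncSqliteBackend

    async def main() -> None:
        backend = AsyncSqliteBackend('/tmp/cachu-demo.sqlite')
        await backend.set('k', None, ttl=60)   # caching None is legal
        value = await backend.get('k')
        if value is NO_VALUE:
            print('miss (absent or expired)')
        else:
            print('hit:', value)               # prints: hit: None

    asyncio.run(main())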
     async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
         """
-        …
-        row = await cursor.fetchone()
-        …
-        if row is None:
-            return NO_VALUE, None
+        try:
+            conn = await self._ensure_initialized()
+            cursor = await conn.execute(
+                'SELECT value, created_at, expires_at FROM cache WHERE key = ?',
+                (key,),
+            )
+            row = await cursor.fetchone()

-        …
-            await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
-            await conn.commit()
-            return NO_VALUE, None
+            if row is None:
+                return NO_VALUE, None

-        …
+            value_blob, created_at, expires_at = row
+            if time.time() > expires_at:
                 return NO_VALUE, None

+            return pickle.loads(value_blob), created_at
+        except Exception:
+            return NO_VALUE, None
+
     async def set(self, key: str, value: Any, ttl: int) -> None:
         """Set value with TTL in seconds.
         """
         now = time.time()
         value_blob = pickle.dumps(value)

-        async with self.…:
+        async with self._write_lock:
             conn = await self._ensure_initialized()
             await conn.execute(
                 '''INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)

@@ -129,7 +127,7 @@ class AsyncSqliteBackend(AsyncBackend):
     async def delete(self, key: str) -> None:
         """Delete value by key.
         """
-        async with self.…:
+        async with self._write_lock:
             try:
                 conn = await self._ensure_initialized()
                 await conn.execute('DELETE FROM cache WHERE key = ?', (key,))

@@ -140,7 +138,7 @@ class AsyncSqliteBackend(AsyncBackend):
     async def clear(self, pattern: str | None = None) -> int:
         """Clear entries matching pattern. Returns count of cleared entries.
         """
-        async with self.…:
+        async with self._write_lock:
             try:
                 conn = await self._ensure_initialized()
                 if pattern is None:
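All mutating paths (set, delete, clear, and the expiry cleanup further down) now serialize on _write_lock. SQLite permits only one writer at a time, so funneling in-process writers through a single asyncio.Lock avoids 'database is locked' errors up front instead of leaning solely on the 5-second busy_timeout. A runnable illustration of the pattern with aiosqlite (schema copied from the diff; not cachu's code):

    import asyncio
    import time

    import aiosqlite

    async def main() -> None:
        conn = await aiosqlite.connect(':memory:')
        await conn.execute(
            'CREATE TABLE cache (key TEXT PRIMARY KEY, value BLOB NOT NULL, '
            'created_at REAL NOT NULL, expires_at REAL NOT NULL)'
        )
        write_lock = asyncio.Lock()

        async def put(key: str, blob: bytes, ttl: int) -> None:
            now = time.time()
            async with write_lock:  # one in-process writer at a time
                await conn.execute(
                    'INSERT OR REPLACE INTO cache VALUES (?, ?, ?, ?)',
                    (key, blob, now, now + ttl),
                )
                await conn.commit()

        await asyncio.gather(*(put(f'k{i}', b'v', 60) for i in range(5)))
        async with conn.execute('SELECT COUNT(*) FROM cache') as cur:
            print(await cur.fetchone())  # (5,)
        await conn.close()

    asyncio.run(main())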
@@ -168,22 +166,21 @@ class AsyncSqliteBackend(AsyncBackend):
         """Iterate over keys matching pattern.
         """
         now = time.time()
+        conn = await self._ensure_initialized()

-        …
-            (glob_pattern, now),
-        )
+        if pattern is None:
+            cursor = await conn.execute(
+                'SELECT key FROM cache WHERE expires_at > ?',
+                (now,),
+            )
+        else:
+            glob_pattern = self._fnmatch_to_glob(pattern)
+            cursor = await conn.execute(
+                'SELECT key FROM cache WHERE key GLOB ? AND expires_at > ?',
+                (glob_pattern, now),
+            )

-        …
+        all_keys = [row[0] for row in await cursor.fetchall()]

         for key in all_keys:
             yield key
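Key listing now happens in SQL: GLOB does the pattern match (fnmatch's * and ? carry over directly) and expires_at > now filters out dead keys before they ever reach Python. A standalone illustration with the stdlib sqlite3 module (table layout copied from the diff):

    import sqlite3
    import time

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE cache (key TEXT PRIMARY KEY, value BLOB, '
                 'created_at REAL, expires_at REAL)')
    now = time.time()
    conn.executemany('INSERT INTO cache VALUES (?, ?, ?, ?)', [
        ('user:1', b'', now, now + 60),   # live
        ('user:2', b'', now, now - 1),    # already expired
        ('order:1', b'', now, now + 60),  # live, different prefix
    ])

    cur = conn.execute(
        'SELECT key FROM cache WHERE key GLOB ? AND expires_at > ?',
        ('user:*', now),
    )
    print([row[0] for row in cur.fetchall()])  # ['user:1']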
@@ -193,25 +190,24 @@ class AsyncSqliteBackend(AsyncBackend):
         """
         now = time.time()

-        …
-        )
+        try:
+            conn = await self._ensure_initialized()
+            if pattern is None:
+                cursor = await conn.execute(
+                    'SELECT COUNT(*) FROM cache WHERE expires_at > ?',
+                    (now,),
+                )
+            else:
+                glob_pattern = self._fnmatch_to_glob(pattern)
+                cursor = await conn.execute(
+                    'SELECT COUNT(*) FROM cache WHERE key GLOB ? AND expires_at > ?',
+                    (glob_pattern, now),
+                )

-        …
+            row = await cursor.fetchone()
+            return row[0]
+        except Exception:
+            return 0

     def _fnmatch_to_glob(self, pattern: str) -> str:
         """Convert fnmatch pattern to SQLite GLOB pattern.

@@ -223,7 +219,7 @@ class AsyncSqliteBackend(AsyncBackend):
         """
         now = time.time()

-        async with self.…:
+        async with self._write_lock:
             conn = await self._ensure_initialized()
             cursor = await conn.execute(
                 'SELECT COUNT(*) FROM cache WHERE expires_at <= ?',
cachu/decorator.py
CHANGED
@@ -4,9 +4,9 @@ import logging
 import os
 import threading
 import time
+from collections.abc import Callable
 from functools import wraps
 from typing import Any
-from collections.abc import Callable

 from .backends import NO_VALUE, Backend
 from .backends.file import FileBackend

@@ -61,7 +61,7 @@ def _get_backend(package: str | None, backend_type: str, ttl: int) -> Backend:
     return backend


-def get_backend(backend_type: str | None = None, package: str | None = None, ttl: int = …) -> Backend:
+def get_backend(backend_type: str | None = None, package: str | None = None, *, ttl: int) -> Backend:
     """Get a backend instance.

     Args:
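The sync API gets the same treatment as the async one: ttl becomes keyword-only with no default (the import block is also reordered to satisfy import sorting). A brief sketch of a 0.2.2 call site, importing from cachu.decorator where the function is defined; whether it is re-exported at package level is not visible in this diff:

    from cachu.decorator import get_backend

    backend = get_backend('memory', ttl=600)   # ok: ttl passed by name
    # get_backend('memory', None, 600)         # TypeError under 0.2.2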
{cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/RECORD
CHANGED

@@ -1,9 +1,8 @@
-cachu/__init__.py,sha256=…
-cachu/async_decorator.py,sha256=…
+cachu/__init__.py,sha256=aJsGOKN5kfurhCuJjlefhO1yCZFxgf1P7JZk44Jxofc,1286
+cachu/async_decorator.py,sha256=Jx2fHESLlld7NZiD2-6kcozukJtp5efnt4cMhntDDRA,8939
 cachu/async_operations.py,sha256=eVqhZk3FVLNip_abjnCzG8AajzvJTtXbpL--dpMXBlc,5597
-cachu/cache.py,sha256=UOh1hsvo5wqpf-quU0glGZi5bgjF8gkaSmYIkAwWfUA,23362
 cachu/config.py,sha256=KtcDGpSTJmjRrcNLz9_Om3O814oJJ3p8gntB84Pd6Dk,5922
-cachu/decorator.py,sha256=…
+cachu/decorator.py,sha256=RHwDRZxZfOkBgEK1XgRyis22bxQ0ba0X4NtHBd9FTb4,8161
 cachu/keys.py,sha256=fwwNOpnDJFCIWZoQ5UGJWhJa6xu36hsBsURI-n2NJKU,3557
 cachu/operations.py,sha256=t42_Er-O59vrwFa5jdf4yq3Jr4li2l7php4yMVJnxPs,5588
 cachu/types.py,sha256=FghBN5GhxnrpuT4WUL9iNnAfdoH__cw9_Ag4kHbIXq4,723

@@ -11,12 +10,12 @@ cachu/backends/__init__.py,sha256=Jn2yBAMmJ8d0J_NyjOtxRt7UTyMLf1rlY8QJ049hXE8,13…
 cachu/backends/async_base.py,sha256=oZ3K3PhsYkbgZxFLFk3_NbxBxtNopqS90HZBizwg_q8,1394
 cachu/backends/async_memory.py,sha256=SQvSHeWbySa52BnQLF75nhVXgsydubNu84a8hvSzQSc,3457
 cachu/backends/async_redis.py,sha256=8kefPIoIJDAZ6C6HJCvHqKFMDS10sJYh8YcJMpXpQm8,4455
-cachu/backends/async_sqlite.py,sha256=…
+cachu/backends/async_sqlite.py,sha256=HbdV1ih-xMSfl8a4-S8xhyiFjkEBPwDlDkCT9AMe4Us,8143
 cachu/backends/file.py,sha256=Pu01VtgHDgK6ev5hqyZXuJRCSB2VbNKHQ4w4nNKNyeI,298
 cachu/backends/memory.py,sha256=kIgrVU8k_3Aquyj2PDf8IPbTjCITM_0V5GU47m3fJmo,3138
 cachu/backends/redis.py,sha256=yE5rEBgOij9QOeC1VhWdIbGCgi442q-aWfmbbG4aNSE,3858
 cachu/backends/sqlite.py,sha256=whduN5G_bN6ZJNuCBwbraDcadv_sg0j-OEiFnP8EEsk,7803
-cachu-0.2.0.dist-info/METADATA,sha256=…
-cachu-0.2.0.dist-info/WHEEL,sha256=…
-cachu-0.2.0.dist-info/top_level.txt,sha256=…
-cachu-0.2.0.dist-info/RECORD,,
+cachu-0.2.2.dist-info/METADATA,sha256=dZL33-xMXzoEepH7HJ1BhSYyS14NFQj8E4wTW6cVEk4,11992
+cachu-0.2.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+cachu-0.2.2.dist-info/top_level.txt,sha256=g80nNoMvLMzhSwQWV-JotCBqtsLAHeFMBo_g8hCK8hQ,6
+cachu-0.2.2.dist-info/RECORD,,
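Each RECORD entry is path,sha256=<digest>,<size>, where the digest is urlsafe base64 without padding, so any byte-level change to a module surfaces here. A sketch of how an entry's hash can be verified:

    import base64
    import hashlib

    def record_hash(path: str) -> str:
        digest = hashlib.sha256(open(path, 'rb').read()).digest()
        return base64.urlsafe_b64encode(digest).rstrip(b'=').decode()

    # e.g. record_hash('cachu/__init__.py') should equal
    # 'aJsGOKN5kfurhCuJjlefhO1yCZFxgf1P7JZk44Jxofc' for the 0.2.2 wheel contents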
cachu/cache.py
DELETED

@@ -1,636 +0,0 @@
import dbm
import inspect
import logging
import os
import pathlib
import threading
from collections.abc import Callable
from functools import partial, wraps
from typing import Any

from dogpile.cache import CacheRegion, make_region
from dogpile.cache.backends.file import AbstractFileLock
from dogpile.cache.region import DefaultInvalidationStrategy
from dogpile.util.readwrite_lock import ReadWriteMutex

from .config import _get_caller_package, config, get_config, is_disabled

logger = logging.getLogger(__name__)


def _is_connection_like(obj: Any) -> bool:
    """Check if object appears to be a database connection.
    """
    if hasattr(obj, 'driver_connection'):
        return True

    if hasattr(obj, 'dialect'):
        return True

    if hasattr(obj, 'engine'):
        return True

    obj_type = str(type(obj))
    connection_indicators = ('Connection', 'Engine', 'psycopg', 'pyodbc', 'sqlite3')

    return any(indicator in obj_type for indicator in connection_indicators)


def _normalize_namespace(namespace: str) -> str:
    """Normalize namespace to always be wrapped in pipes.
    """
    if not namespace:
        return ''
    namespace = namespace.strip('|')
    namespace = namespace.replace('|', '.')
    return f'|{namespace}|'


def _create_namespace_filter(namespace: str) -> Callable[[str], bool]:
    """Create a filter function for namespace-based key matching.
    """
    debug_prefix = config.debug_key
    normalized_ns = _normalize_namespace(namespace)
    namespace_pattern = f'|{normalized_ns}|'

    def matches_namespace(key: str) -> bool:
        if not key.startswith(debug_prefix):
            return False
        key_after_prefix = key[len(debug_prefix):]
        return namespace_pattern in key_after_prefix

    return matches_namespace


def key_generator(namespace: str, fn: Callable[..., Any], exclude_params: set[str] | None = None) -> Callable[..., str]:
    """Generate a cache key for the given namespace and function.
    """
    exclude_params = exclude_params or set()
    unwrapped_fn = getattr(fn, '__wrapped__', fn)
    namespace = f'{unwrapped_fn.__name__}|{_normalize_namespace(namespace)}' if namespace else f'{unwrapped_fn.__name__}'

    argspec = inspect.getfullargspec(unwrapped_fn)
    _args_reversed = list(reversed(argspec.args or []))
    _defaults_reversed = list(reversed(argspec.defaults or []))
    args_with_defaults = {_args_reversed[i]: default for i, default in enumerate(_defaults_reversed)}

    def generate_key(*args, **kwargs) -> str:
        args, vargs = args[:len(argspec.args)], args[len(argspec.args):]
        as_kwargs = dict(**args_with_defaults)
        as_kwargs.update(dict(zip(argspec.args, args)))
        as_kwargs.update({f'vararg{i+1}': varg for i, varg in enumerate(vargs)})
        as_kwargs.update(**kwargs)
        as_kwargs = {k: v for k, v in as_kwargs.items() if not _is_connection_like(v) and k not in {'self', 'cls'}}
        as_kwargs = {k: v for k, v in as_kwargs.items() if not k.startswith('_') and k not in exclude_params}
        as_str = ' '.join(f'{str(k)}={repr(v)}' for k, v in sorted(as_kwargs.items()))
        return f'{namespace}|{as_str}'

    return generate_key
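For reference, this deleted key builder normalized every call into a sorted kwargs string: positional args were mapped to parameter names, extra varargs became vararg1, vararg2, and so on, and connection-like objects, self/cls, underscore-prefixed names, and exclude_params were dropped. A worked illustration of the resulting key shape (hypothetical function; key format derived from the code above):

    import sqlite3

    def fetch_user(conn, user_id, region='eu', _trace=None):
        ...

    conn = sqlite3.connect(':memory:')
    # key_generator('accounts', fetch_user)(conn, 42) produces:
    #   "fetch_user||accounts||region='eu' user_id=42"
    # conn is dropped because its type looks connection-like ('sqlite3'),
    # and _trace is always dropped because of the leading underscore.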

def key_mangler_default(key: str) -> str:
    """Modify the key for debugging purposes by prefixing it with a debug marker.
    """
    return f'{config.debug_key}{key}'


def key_mangler_region(key: str, region: str) -> str:
    """Modify the key for a specific region for debugging purposes.
    """
    return f'{region}:{config.debug_key}{key}'


def _make_key_mangler(debug_key: str) -> Callable[[str], str]:
    """Create a key mangler with a captured debug_key.
    """
    def mangler(key: str) -> str:
        return f'{debug_key}{key}'
    return mangler


def _make_region_key_mangler(debug_key: str, region_name: str) -> Callable[[str], str]:
    """Create a region key mangler with captured debug_key and region name.
    """
    def mangler(key: str) -> str:
        return f'{region_name}:{debug_key}{key}'
    return mangler


def should_cache_fn(value: Any) -> bool:
    """Determine if the given value should be cached.
    """
    return bool(value)


def _seconds_to_region_name(seconds: int) -> str:
    """Convert seconds to a human-readable region name.
    """
    if seconds < 60:
        return f'{seconds}s'
    elif seconds < 3600:
        return f'{seconds // 60}m'
    elif seconds < 86400:
        return f'{seconds // 3600}h'
    else:
        return f'{seconds // 86400}d'


def get_redis_client(namespace: str | None = None) -> Any:
    """Create a Redis client directly from config.
    """
    try:
        import redis
    except ImportError as e:
        raise RuntimeError(
            "Redis support requires the 'redis' package. Install with: pip install redis"
        ) from e
    if namespace is None:
        namespace = _get_caller_package()
    cfg = get_config(namespace)
    connection_kwargs = {}
    if cfg.redis_ssl:
        connection_kwargs['ssl'] = True
    return redis.Redis(
        host=cfg.redis_host,
        port=cfg.redis_port,
        db=cfg.redis_db,
        **connection_kwargs
    )


class CacheRegionWrapper:
    """Wrapper for CacheRegion that adds exclude_params support.
    """

    def __init__(self, region: CacheRegion) -> None:
        self._region = region
        self._original_cache_on_arguments = region.cache_on_arguments

    def cache_on_arguments(
            self,
            namespace: str = '',
            should_cache_fn: Callable[[Any], bool] = should_cache_fn,
            exclude_params: set[str] | None = None,
            **kwargs) -> Callable:
        """Cache function results based on arguments with optional parameter exclusion.
        """
        if exclude_params:
            custom_key_gen = partial(key_generator, exclude_params=exclude_params)
            cache_decorator = self._original_cache_on_arguments(
                namespace=namespace,
                should_cache_fn=should_cache_fn,
                function_key_generator=custom_key_gen,
                **kwargs
            )
        else:
            cache_decorator = self._original_cache_on_arguments(
                namespace=namespace,
                should_cache_fn=should_cache_fn,
                **kwargs
            )

        def decorator(fn: Callable) -> Callable:
            cached_fn = cache_decorator(fn)

            @wraps(fn)
            def wrapper(*args, **kw):
                if is_disabled():
                    return fn(*args, **kw)
                return cached_fn(*args, **kw)
            return wrapper
        return decorator

    def __getattr__(self, name: str) -> Any:
        """Delegate all other attributes to the wrapped region.
        """
        return getattr(self._region, name)


def _wrap_cache_on_arguments(region: CacheRegion) -> CacheRegionWrapper:
    """Wrap CacheRegion to add exclude_params support with proper IDE typing.
    """
    return CacheRegionWrapper(region)
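CacheRegionWrapper existed to thread exclude_params through dogpile's cache_on_arguments so that volatile arguments never enter the cache key. A hypothetical pre-0.2.2 usage sketch (region setup via memorycache, defined later in this file):

    region = memorycache(300)

    @region.cache_on_arguments(namespace='reports', exclude_params={'verbose'})
    def build_report(month: str, verbose: bool = False):
        ...  # verbose only changes logging, so it must not split the cache

    # build_report('2024-01') and build_report('2024-01', verbose=True)
    # resolve to the same cache entry.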

class CustomFileLock(AbstractFileLock):
    """Implementation of a file lock using a read-write mutex.
    """

    def __init__(self, filename: str) -> None:
        self.mutex = ReadWriteMutex()

    def acquire_read_lock(self, wait: bool) -> bool:
        """Acquire the read lock.
        """
        ret = self.mutex.acquire_read_lock(wait)
        return wait or ret

    def acquire_write_lock(self, wait: bool) -> bool:
        """Acquire the write lock.
        """
        ret = self.mutex.acquire_write_lock(wait)
        return wait or ret

    def release_read_lock(self) -> bool:
        """Release the read lock.
        """
        return self.mutex.release_read_lock()

    def release_write_lock(self) -> bool:
        """Release the write lock.
        """
        return self.mutex.release_write_lock()


class RedisInvalidator(DefaultInvalidationStrategy):
    """Redis invalidation strategy with optional key deletion.
    """

    def __init__(self, region: CacheRegion, delete_keys: bool = False) -> None:
        """Initialize the RedisInvalidator for a given CacheRegion.
        """
        self.region = region
        self.delete_keys = delete_keys
        super().__init__()

    def invalidate(self, hard: bool = True) -> None:
        """Invalidate the cache region using timestamp-based invalidation.
        """
        super().invalidate(hard)
        if self.delete_keys:
            self._delete_backend_keys()

    def _delete_backend_keys(self) -> None:
        """Delete keys from Redis backend for this region.
        """
        try:
            client = self.region.backend.writer_client
            region_prefix = f'{self.region.name}:'
            deleted_count = 0
            for key in client.scan_iter(match=f'{region_prefix}*'):
                client.delete(key)
                deleted_count += 1
            logger.debug(f'Deleted {deleted_count} Redis keys for region "{self.region.name}"')
        except Exception as e:
            logger.warning(f'Failed to delete Redis keys for region "{self.region.name}": {e}')


def _handle_all_regions(regions_dict: dict[tuple[str | None, int], CacheRegionWrapper], log_level: str = 'warning') -> Callable:
    """Decorator to handle clearing all cache regions when seconds=None.
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(
                seconds: int | None = None,
                namespace: str | None = None,
                *,
                package: str | None = None,
        ) -> None:
            resolved_ns = package if package is not None else _get_caller_package()
            if seconds is None:
                regions_to_clear = [
                    (ns, secs) for (ns, secs) in regions_dict
                    if ns == resolved_ns
                ]
                if not regions_to_clear:
                    log_func = getattr(logger, log_level)
                    cache_type = func.__name__.replace('clear_', '').replace('cache', ' cache')
                    log_func(f'No{cache_type} regions exist for namespace "{resolved_ns}"')
                    return
                for _, region_seconds in regions_to_clear:
                    func(region_seconds, namespace, _resolved_namespace=resolved_ns)
                return
            return func(seconds, namespace, _resolved_namespace=resolved_ns)
        return wrapper
    return decorator


_region_lock = threading.Lock()
_memory_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}


def memorycache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
    """Create or retrieve a memory cache region with a specified expiration time.
    """
    with _region_lock:
        namespace = package if package is not None else _get_caller_package()
        cfg = get_config(namespace)
        key = (namespace, seconds)

        if key not in _memory_cache_regions:
            region = make_region(
                function_key_generator=key_generator,
                key_mangler=_make_key_mangler(cfg.debug_key),
            ).configure(
                cfg.memory,
                expiration_time=seconds,
            )
            _memory_cache_regions[key] = _wrap_cache_on_arguments(region)
            logger.debug(f"Created memory cache region for namespace '{namespace}', {seconds}s TTL")
        return _memory_cache_regions[key]
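Regions were memoized per (namespace, seconds) pair, so repeated calls with the same TTL reused one region. A hypothetical sketch of how this dogpile-era API was consumed before its removal in 0.2.2:

    from cachu.cache import memorycache

    @memorycache(60).cache_on_arguments()
    def slow_square(n: int) -> int:
        print('computing...')
        return n * n

    slow_square(4)   # computes and caches
    slow_square(4)   # served from the 60-second memory region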

_file_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}


def filecache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
    """Create or retrieve a file cache region with a specified expiration time.
    """
    with _region_lock:
        namespace = package if package is not None else _get_caller_package()
        cfg = get_config(namespace)
        key = (namespace, seconds)

        if seconds < 60:
            filename = f'cache{seconds}sec'
        elif seconds < 3600:
            filename = f'cache{seconds // 60}min'
        else:
            filename = f'cache{seconds // 3600}hour'

        if namespace:
            filename = f'{namespace}_{filename}'

        if key not in _file_cache_regions:
            if cfg.file == 'dogpile.cache.null':
                logger.debug(
                    f"filecache() called from '{namespace}' with null backend - "
                    f"caching disabled for this region."
                )
                name = _seconds_to_region_name(seconds)
                region = make_region(name=name, function_key_generator=key_generator,
                                     key_mangler=_make_key_mangler(cfg.debug_key))
                region.configure('dogpile.cache.null')
            else:
                region = make_region(
                    function_key_generator=key_generator,
                    key_mangler=_make_key_mangler(cfg.debug_key),
                ).configure(
                    cfg.file,
                    expiration_time=seconds,
                    arguments={
                        'filename': os.path.join(cfg.tmpdir, filename),
                        'lock_factory': CustomFileLock
                    }
                )
            logger.debug(f"Created file cache region for namespace '{namespace}', {seconds}s TTL")
            _file_cache_regions[key] = _wrap_cache_on_arguments(region)
        return _file_cache_regions[key]


_redis_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}


def rediscache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
    """Create or retrieve a Redis cache region with a specified expiration time.
    """
    with _region_lock:
        namespace = package if package is not None else _get_caller_package()
        cfg = get_config(namespace)
        key = (namespace, seconds)

        if key not in _redis_cache_regions:
            name = _seconds_to_region_name(seconds)
            region = make_region(name=name, function_key_generator=key_generator,
                                 key_mangler=_make_region_key_mangler(cfg.debug_key, name))

            if cfg.redis == 'dogpile.cache.null':
                logger.debug(
                    f"rediscache() called from '{namespace}' with null backend - "
                    f"caching disabled for this region."
                )
                region.configure('dogpile.cache.null')
            else:
                connection_kwargs = {}
                if cfg.redis_ssl:
                    connection_kwargs['ssl'] = True

                region.configure(
                    cfg.redis,
                    arguments={
                        'host': cfg.redis_host,
                        'port': cfg.redis_port,
                        'db': cfg.redis_db,
                        'redis_expiration_time': seconds,
                        'distributed_lock': cfg.redis_distributed,
                        'thread_local_lock': not cfg.redis_distributed,
                        'connection_kwargs': connection_kwargs,
                    },
                    region_invalidator=RedisInvalidator(region)
                )
            logger.debug(f"Created redis cache region for namespace '{namespace}', {seconds}s TTL")
            _redis_cache_regions[key] = _wrap_cache_on_arguments(region)
        return _redis_cache_regions[key]


@_handle_all_regions(_memory_cache_regions)
def clear_memorycache(
        seconds: int | None = None,
        namespace: str | None = None,
        *,
        _resolved_namespace: str | None = None,
) -> None:
    """Clear a memory cache region.
    """
    pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
    region_key = (pkg, seconds)

    if region_key not in _memory_cache_regions:
        logger.warning(f'No memory cache region exists for namespace "{pkg}", {seconds} seconds')
        return

    cache_dict = _memory_cache_regions[region_key].actual_backend._cache

    if namespace is None:
        cache_dict.clear()
        logger.debug(f'Cleared all memory cache keys for namespace "{pkg}", {seconds} second region')
    else:
        matches_namespace = _create_namespace_filter(namespace)
        keys_to_delete = [key for key in list(cache_dict.keys()) if matches_namespace(key)]
        for key in keys_to_delete:
            del cache_dict[key]
        logger.debug(f'Cleared {len(keys_to_delete)} memory cache keys for namespace "{namespace}"')


@_handle_all_regions(_file_cache_regions)
def clear_filecache(
        seconds: int | None = None,
        namespace: str | None = None,
        *,
        _resolved_namespace: str | None = None,
) -> None:
    """Clear a file cache region.
    """
    pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
    cfg = get_config(pkg)
    region_key = (pkg, seconds)

    if region_key not in _file_cache_regions:
        logger.warning(f'No file cache region exists for namespace "{pkg}", {seconds} seconds')
        return

    filename = _file_cache_regions[region_key].actual_backend.filename
    basename = pathlib.Path(filename).name
    filepath = os.path.join(cfg.tmpdir, basename)

    if namespace is None:
        with dbm.open(filepath, 'n'):
            pass
        logger.debug(f'Cleared all file cache keys for namespace "{pkg}", {seconds} second region')
    else:
        matches_namespace = _create_namespace_filter(namespace)
        with dbm.open(filepath, 'w') as db:
            keys_to_delete = [
                key for key in list(db.keys())
                if matches_namespace(key.decode())
            ]
            for key in keys_to_delete:
                del db[key]
        logger.debug(f'Cleared {len(keys_to_delete)} file cache keys for namespace "{namespace}"')


@_handle_all_regions(_redis_cache_regions)
def clear_rediscache(
        seconds: int | None = None,
        namespace: str | None = None,
        *,
        _resolved_namespace: str | None = None,
) -> None:
    """Clear a redis cache region.
    """
    pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
    cfg = get_config(pkg)
    client = get_redis_client(pkg)

    try:
        region_name = _seconds_to_region_name(seconds)
        region_prefix = f'{region_name}:{cfg.debug_key}'
        deleted_count = 0

        if namespace is None:
            for key in client.scan_iter(match=f'{region_prefix}*'):
                client.delete(key)
                deleted_count += 1
            logger.debug(f'Cleared {deleted_count} Redis keys for region "{region_name}"')
        else:
            matches_namespace = _create_namespace_filter(namespace)
            for key in client.scan_iter(match=f'{region_prefix}*'):
                key_str = key.decode()
                key_without_region = key_str[len(region_name) + 1:]
                if matches_namespace(key_without_region):
                    client.delete(key)
                    deleted_count += 1
            logger.debug(f'Cleared {deleted_count} Redis keys for namespace "{namespace}" in region "{region_name}"')
    finally:
        client.close()


def set_memorycache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
    """Set a specific cached entry in memory cache.
    """
    region = memorycache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.set(cache_key, value)
    logger.debug(f'Set memory cache key for {fn.__name__} in namespace "{namespace}"')


def delete_memorycache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from memory cache.
    """
    region = memorycache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.delete(cache_key)
    logger.debug(f'Deleted memory cache key for {fn.__name__} in namespace "{namespace}"')


def set_filecache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
    """Set a specific cached entry in file cache.
    """
    region = filecache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.set(cache_key, value)
    logger.debug(f'Set file cache key for {fn.__name__} in namespace "{namespace}"')


def delete_filecache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from file cache.
    """
    region = filecache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.delete(cache_key)
    logger.debug(f'Deleted file cache key for {fn.__name__} in namespace "{namespace}"')


def set_rediscache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
    """Set a specific cached entry in redis cache.
    """
    region = rediscache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.set(cache_key, value)
    logger.debug(f'Set redis cache key for {fn.__name__} in namespace "{namespace}"')


def delete_rediscache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from redis cache.
    """
    region = rediscache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.delete(cache_key)
    logger.debug(f'Deleted redis cache key for {fn.__name__} in namespace "{namespace}"')


_BACKEND_MAP = {
    'memory': (memorycache, clear_memorycache, set_memorycache_key, delete_memorycache_key),
    'redis': (rediscache, clear_rediscache, set_rediscache_key, delete_rediscache_key),
    'file': (filecache, clear_filecache, set_filecache_key, delete_filecache_key),
}


def defaultcache(seconds: int) -> CacheRegionWrapper:
    """Return cache region based on configured default backend.
    """
    backend = config.default_backend
    if backend not in _BACKEND_MAP:
        raise ValueError(f'Unknown default_backend: {backend}. Must be one of: {list(_BACKEND_MAP.keys())}')
    return _BACKEND_MAP[backend][0](seconds)


def clear_defaultcache(seconds: int | None = None, namespace: str | None = None) -> None:
    """Clear the default cache region.
    """
    return _BACKEND_MAP[config.default_backend][1](seconds, namespace)


def set_defaultcache_key(seconds: int, namespace: str, fn: Callable[..., Any],
                         value: Any, **kwargs) -> None:
    """Set a specific cached entry in default cache.
    """
    return _BACKEND_MAP[config.default_backend][2](seconds, namespace, fn, value, **kwargs)


def delete_defaultcache_key(seconds: int, namespace: str,
                            fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from default cache.
    """
    return _BACKEND_MAP[config.default_backend][3](seconds, namespace, fn, **kwargs)


def clear_cache_for_namespace(
        namespace: str,
        backend: str | None = None,
        seconds: int | None = None,
) -> None:
    """Clear cache regions for a specific namespace (cross-module safe).
    """
    backends = [backend] if backend else ['memory', 'file', 'redis']
    for b in backends:
        if b == 'memory':
            clear_memorycache(seconds=seconds, package=namespace)
        elif b == 'file':
            clear_filecache(seconds=seconds, package=namespace)
        elif b == 'redis':
            clear_rediscache(seconds=seconds, package=namespace)


if __name__ == '__main__':
    __import__('doctest').testmod(optionflags=4 | 8 | 32)
{cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/WHEEL
File without changes

{cachu-0.2.0.dist-info → cachu-0.2.2.dist-info}/top_level.txt
File without changes