cachu 0.1.1__tar.gz → 0.1.3__tar.gz
This diff compares the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- {cachu-0.1.1 → cachu-0.1.3}/PKG-INFO +27 -10
- {cachu-0.1.1 → cachu-0.1.3}/README.md +25 -9
- {cachu-0.1.1 → cachu-0.1.3}/pyproject.toml +2 -1
- {cachu-0.1.1 → cachu-0.1.3}/setup.cfg +1 -1
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/__init__.py +1 -1
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/backends/file.py +2 -2
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/decorator.py +1 -6
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/operations.py +24 -16
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu.egg-info/PKG-INFO +27 -10
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu.egg-info/SOURCES.txt +0 -1
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu.egg-info/requires.txt +1 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_clearing.py +61 -0
- cachu-0.1.1/src/cachu/cache.py +0 -636
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/backends/__init__.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/backends/memory.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/backends/redis.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/config.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/keys.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu/types.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu.egg-info/dependency_links.txt +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/src/cachu.egg-info/top_level.txt +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_config.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_defaultcache.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_delete_keys.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_disable.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_exclude_params.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_file_cache.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_integration.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_memory_cache.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_namespace.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_namespace_isolation.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_redis_cache.py +0 -0
- {cachu-0.1.1 → cachu-0.1.3}/tests/test_set_keys.py +0 -0
{cachu-0.1.1 → cachu-0.1.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cachu
-Version: 0.1.1
+Version: 0.1.3
 Summary: Flexible caching library built on dogpile.cache
 Author: bissli
 License-Expression: 0BSD
@@ -14,6 +14,7 @@ Requires-Dist: redis; extra == "redis"
 Provides-Extra: test
 Requires-Dist: pytest; extra == "test"
 Requires-Dist: pytest-mock; extra == "test"
+Requires-Dist: redis; extra == "test"
 Requires-Dist: testcontainers[redis]; extra == "test"
 
 # cachu
@@ -25,13 +26,13 @@ Flexible caching library with support for memory, file, and Redis backends.
 **Basic installation:**
 
 ```bash
-pip install
+pip install cachu
 ```
 
 **With Redis support:**
 
 ```bash
-pip install
+pip install cachu[redis]
 ```
 
 ## Quick Start
@@ -80,7 +81,7 @@ cachu.configure(
 
 ### Package Isolation
 
-Each package automatically gets isolated configuration
+Each package automatically gets isolated configuration, preventing conflicts when multiple libraries use cachu:
 
 ```python
 # In library_a/config.py
@@ -91,7 +92,18 @@ cachu.configure(key_prefix='lib_a:', redis_url='redis://redis-a:6379/0')
 import cachu
 cachu.configure(key_prefix='lib_b:', redis_url='redis://redis-b:6379/0')
 
-# Each library
+# Each library's @cache calls use its own configuration automatically
+```
+
+To override the automatic detection, specify the `package` parameter:
+
+```python
+from cachu import cache
+
+# This function will use library_a's configuration
+@cache(ttl=300, package='library_a')
+def get_shared_data(id: int) -> dict:
+    return fetch(id)
 ```
 
 Retrieve configuration:
@@ -107,7 +119,7 @@ all_configs = cachu.get_all_configs()  # All configurations
 ### Basic Caching
 
 ```python
-from cachu import
+from cachu import cache
 
 @cache(ttl=300, backend='memory')
 def expensive_operation(param: str) -> dict:
@@ -138,6 +150,8 @@ def fetch_external_data(api_key: str) -> dict:
 Tags organize cache entries into logical groups for selective clearing:
 
 ```python
+from cachu import cache, cache_clear
+
 @cache(ttl=300, tag='users')
 def get_user(user_id: int) -> dict:
     return fetch_user(user_id)
@@ -147,7 +161,7 @@ def get_product(product_id: int) -> dict:
     return fetch_product(product_id)
 
 # Clear only user caches
-
+cache_clear(tag='users', backend='memory', ttl=300)
 ```
 
 ### Conditional Caching
@@ -214,12 +228,14 @@ result = get_data(123, _overwrite_cache=True)
 Track hits and misses:
 
 ```python
+from cachu import cache, cache_info
+
 @cache(ttl=300)
 def get_user(user_id: int) -> dict:
     return fetch_user(user_id)
 
 # After some usage
-info =
+info = cache_info(get_user)
 print(f"Hits: {info.hits}, Misses: {info.misses}, Size: {info.currsize}")
 ```
 
@@ -267,7 +283,7 @@ cache_delete(get_user, user_id=123)
 ### Clearing Caches
 
 ```python
-from cachu import
+from cachu import cache_clear
 
 # Clear specific region
 cache_clear(backend='memory', ttl=300)
@@ -302,7 +318,8 @@ def get_data(id: int) -> dict:
     return fetch(id)
 
 # In tests/conftest.py
-cachu
+from cachu import cache_clear
+cache_clear(backend='memory', ttl=300, package='myapp')
 ```
 
 ## Instance and Class Methods
{cachu-0.1.1 → cachu-0.1.3}/README.md
@@ -7,13 +7,13 @@ Flexible caching library with support for memory, file, and Redis backends.
 **Basic installation:**
 
 ```bash
-pip install
+pip install cachu
 ```
 
 **With Redis support:**
 
 ```bash
-pip install
+pip install cachu[redis]
 ```
 
 ## Quick Start
@@ -62,7 +62,7 @@ cachu.configure(
 
 ### Package Isolation
 
-Each package automatically gets isolated configuration
+Each package automatically gets isolated configuration, preventing conflicts when multiple libraries use cachu:
 
 ```python
 # In library_a/config.py
@@ -73,7 +73,18 @@ cachu.configure(key_prefix='lib_a:', redis_url='redis://redis-a:6379/0')
 import cachu
 cachu.configure(key_prefix='lib_b:', redis_url='redis://redis-b:6379/0')
 
-# Each library
+# Each library's @cache calls use its own configuration automatically
+```
+
+To override the automatic detection, specify the `package` parameter:
+
+```python
+from cachu import cache
+
+# This function will use library_a's configuration
+@cache(ttl=300, package='library_a')
+def get_shared_data(id: int) -> dict:
+    return fetch(id)
 ```
 
 Retrieve configuration:
@@ -89,7 +100,7 @@ all_configs = cachu.get_all_configs()  # All configurations
 ### Basic Caching
 
 ```python
-from cachu import
+from cachu import cache
 
 @cache(ttl=300, backend='memory')
 def expensive_operation(param: str) -> dict:
@@ -120,6 +131,8 @@ def fetch_external_data(api_key: str) -> dict:
 Tags organize cache entries into logical groups for selective clearing:
 
 ```python
+from cachu import cache, cache_clear
+
 @cache(ttl=300, tag='users')
 def get_user(user_id: int) -> dict:
     return fetch_user(user_id)
@@ -129,7 +142,7 @@ def get_product(product_id: int) -> dict:
     return fetch_product(product_id)
 
 # Clear only user caches
-
+cache_clear(tag='users', backend='memory', ttl=300)
 ```
 
 ### Conditional Caching
@@ -196,12 +209,14 @@ result = get_data(123, _overwrite_cache=True)
 Track hits and misses:
 
 ```python
+from cachu import cache, cache_info
+
 @cache(ttl=300)
 def get_user(user_id: int) -> dict:
     return fetch_user(user_id)
 
 # After some usage
-info =
+info = cache_info(get_user)
 print(f"Hits: {info.hits}, Misses: {info.misses}, Size: {info.currsize}")
 ```
 
@@ -249,7 +264,7 @@ cache_delete(get_user, user_id=123)
 ### Clearing Caches
 
 ```python
-from cachu import
+from cachu import cache_clear
 
 # Clear specific region
 cache_clear(backend='memory', ttl=300)
@@ -284,7 +299,8 @@ def get_data(id: int) -> dict:
     return fetch(id)
 
 # In tests/conftest.py
-cachu
+from cachu import cache_clear
+cache_clear(backend='memory', ttl=300, package='myapp')
 ```
 
 ## Instance and Class Methods
{cachu-0.1.1 → cachu-0.1.3}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "cachu"
-version = "0.1.1"
+version = "0.1.3"
 description = "Flexible caching library built on dogpile.cache"
 readme = "README.md"
 license = "0BSD"
@@ -16,6 +16,7 @@ redis = ["redis"]
 test = [
     "pytest",
     "pytest-mock",
+    "redis",
     "testcontainers[redis]",
 ]
 
{cachu-0.1.1 → cachu-0.1.3}/src/cachu/backends/file.py
@@ -115,7 +115,7 @@ class FileBackend(Backend):
 
         with dbm.open(self._filepath, 'c') as db:
             keys_to_delete = [
-                k for k in db
+                k for k in db.keys()
                 if fnmatch.fnmatch(k.decode(), pattern)
             ]
             for key in keys_to_delete:
@@ -131,7 +131,7 @@ class FileBackend(Backend):
         with self._lock:
             try:
                 with dbm.open(self._filepath, 'c') as db:
-                    all_keys = [k.decode() for k in db]
+                    all_keys = [k.decode() for k in db.keys()]
             except Exception:
                 return
 
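The `db` → `db.keys()` change above is a portability fix: `dbm.open()` can return different implementations (`dbm.gnu`, `dbm.ndbm`, `dbm.dumb`) depending on the platform, and not all of them support iterating the database object directly, while `keys()` is available on all of them. A minimal sketch of the portable pattern (the scratch path is hypothetical, illustrative only):

```python
import dbm

# Hypothetical scratch file; illustrative only.
with dbm.open('/tmp/example_cache', 'c') as db:
    db[b'alpha'] = b'1'
    db[b'beta'] = b'2'
    # db.keys() works across dbm implementations; iterating the db
    # object itself does not, which is what the fix above addresses.
    all_keys = [k.decode() for k in db.keys()]
    print(sorted(all_keys))  # ['alpha', 'beta']
```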
{cachu-0.1.1 → cachu-0.1.3}/src/cachu/decorator.py
@@ -220,12 +220,7 @@ def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
     Returns
         CacheInfo with hits, misses, and currsize
     """
-
-        actual_fn = fn
-    else:
-        actual_fn = fn
-
-    fn_id = id(actual_fn)
+    fn_id = id(fn)
 
     with _stats_lock:
         hits, misses = _stats.get(fn_id, (0, 0))
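After this change, `get_cache_info` keys its lookup on `id()` of whatever function object it is handed, with no unwrapping branch. A small sketch of an id-keyed stats registry of this shape (only `_stats`, `_stats_lock`, and the `(hits, misses)` tuple layout come from the diff; everything else here is illustrative):

```python
import threading
from collections.abc import Callable
from typing import Any, NamedTuple

class CacheInfo(NamedTuple):
    hits: int
    misses: int
    currsize: int

_stats: dict[int, tuple[int, int]] = {}  # id(fn) -> (hits, misses)
_stats_lock = threading.Lock()

def _record(fn: Callable[..., Any], hit: bool) -> None:
    # Illustrative recorder: bump the counter for this function object.
    with _stats_lock:
        hits, misses = _stats.get(id(fn), (0, 0))
        _stats[id(fn)] = (hits + hit, misses + (not hit))

def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
    # Stats are keyed on the exact function object passed in, so callers
    # must use the same object the decorator registered.
    with _stats_lock:
        hits, misses = _stats.get(id(fn), (0, 0))
    return CacheInfo(hits, misses, currsize=0)  # currsize omitted in this sketch
```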
{cachu-0.1.1 → cachu-0.1.3}/src/cachu/operations.py
@@ -1,8 +1,8 @@
 """Cache CRUD operations.
 """
 import logging
-from typing import Any
 from collections.abc import Callable
+from typing import Any
 
 from .backends import NO_VALUE
 from .config import _get_caller_package, get_config
@@ -124,8 +124,6 @@ def cache_clear(
     if package is None:
         package = _get_caller_package()
 
-    cfg = get_config(package)
-
     if backend is not None:
         backends_to_clear = [backend]
     else:
@@ -141,19 +139,29 @@ def cache_clear(
 
     from .decorator import _backends, _backends_lock
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # When both backend and ttl are specified, directly get/create and clear that backend.
+    # This is essential for distributed caches (Redis) where cache_clear may be called
+    # from a different process than the one that populated the cache.
+    if backend is not None and ttl is not None:
+        backend_instance = _get_backend(package, backend, ttl)
+        cleared = backend_instance.clear(pattern)
+        if cleared > 0:
+            total_cleared += cleared
+            logger.debug(f'Cleared {cleared} entries from {backend} backend (ttl={ttl})')
+    else:
+        with _backends_lock:
+            for (pkg, btype, bttl), backend_instance in list(_backends.items()):
+                if pkg != package:
+                    continue
+                if btype not in backends_to_clear:
+                    continue
+                if ttl is not None and bttl != ttl:
+                    continue
+
+                cleared = backend_instance.clear(pattern)
+                if cleared > 0:
+                    total_cleared += cleared
+                    logger.debug(f'Cleared {cleared} entries from {btype} backend (ttl={bttl})')
 
     return total_cleared
 
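The new fast path above is what makes cross-process clearing work: when both `backend` and `ttl` are given, the backend is created on demand instead of being looked up in the in-process `_backends` registry. A sketch of the scenario it fixes (the worker/admin split is hypothetical; the API calls are the package's own):

```python
# worker.py -- one process populates a shared Redis cache
from cachu import cache

@cache(ttl=300, backend='redis', tag='users')
def get_user(user_id: int) -> dict:
    return {'id': user_id}

get_user(1)  # stores the result in Redis

# admin.py -- a different process that never called get_user
from cachu import cache_clear

# Before 0.1.3 this was a no-op here: this process had no backend
# instance to iterate. With the change above, the Redis backend is
# created on demand and the shared entries are actually cleared.
cache_clear(backend='redis', ttl=300, tag='users')
```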
{cachu-0.1.1 → cachu-0.1.3}/src/cachu.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cachu
-Version: 0.1.1
+Version: 0.1.3
 Summary: Flexible caching library built on dogpile.cache
 Author: bissli
 License-Expression: 0BSD
@@ -14,6 +14,7 @@ Requires-Dist: redis; extra == "redis"
 Provides-Extra: test
 Requires-Dist: pytest; extra == "test"
 Requires-Dist: pytest-mock; extra == "test"
+Requires-Dist: redis; extra == "test"
 Requires-Dist: testcontainers[redis]; extra == "test"
 
 # cachu
@@ -25,13 +26,13 @@ Flexible caching library with support for memory, file, and Redis backends.
 **Basic installation:**
 
 ```bash
-pip install
+pip install cachu
 ```
 
 **With Redis support:**
 
 ```bash
-pip install
+pip install cachu[redis]
 ```
 
 ## Quick Start
@@ -80,7 +81,7 @@ cachu.configure(
 
 ### Package Isolation
 
-Each package automatically gets isolated configuration
+Each package automatically gets isolated configuration, preventing conflicts when multiple libraries use cachu:
 
 ```python
 # In library_a/config.py
@@ -91,7 +92,18 @@ cachu.configure(key_prefix='lib_a:', redis_url='redis://redis-a:6379/0')
 import cachu
 cachu.configure(key_prefix='lib_b:', redis_url='redis://redis-b:6379/0')
 
-# Each library
+# Each library's @cache calls use its own configuration automatically
+```
+
+To override the automatic detection, specify the `package` parameter:
+
+```python
+from cachu import cache
+
+# This function will use library_a's configuration
+@cache(ttl=300, package='library_a')
+def get_shared_data(id: int) -> dict:
+    return fetch(id)
 ```
 
 Retrieve configuration:
@@ -107,7 +119,7 @@ all_configs = cachu.get_all_configs()  # All configurations
 ### Basic Caching
 
 ```python
-from cachu import
+from cachu import cache
 
 @cache(ttl=300, backend='memory')
 def expensive_operation(param: str) -> dict:
@@ -138,6 +150,8 @@ def fetch_external_data(api_key: str) -> dict:
 Tags organize cache entries into logical groups for selective clearing:
 
 ```python
+from cachu import cache, cache_clear
+
 @cache(ttl=300, tag='users')
 def get_user(user_id: int) -> dict:
     return fetch_user(user_id)
@@ -147,7 +161,7 @@ def get_product(product_id: int) -> dict:
     return fetch_product(product_id)
 
 # Clear only user caches
-
+cache_clear(tag='users', backend='memory', ttl=300)
 ```
 
 ### Conditional Caching
@@ -214,12 +228,14 @@ result = get_data(123, _overwrite_cache=True)
 Track hits and misses:
 
 ```python
+from cachu import cache, cache_info
+
 @cache(ttl=300)
 def get_user(user_id: int) -> dict:
     return fetch_user(user_id)
 
 # After some usage
-info =
+info = cache_info(get_user)
 print(f"Hits: {info.hits}, Misses: {info.misses}, Size: {info.currsize}")
 ```
 
@@ -267,7 +283,7 @@ cache_delete(get_user, user_id=123)
 ### Clearing Caches
 
 ```python
-from cachu import
+from cachu import cache_clear
 
 # Clear specific region
 cache_clear(backend='memory', ttl=300)
@@ -302,7 +318,8 @@ def get_data(id: int) -> dict:
     return fetch(id)
 
 # In tests/conftest.py
-cachu
+from cachu import cache_clear
+cache_clear(backend='memory', ttl=300, package='myapp')
 ```
 
 ## Instance and Class Methods
{cachu-0.1.1 → cachu-0.1.3}/tests/test_clearing.py
@@ -111,3 +111,64 @@ def test_cache_clear_redis_by_tag(redis_docker):
     get_product(1)
 
     cachu.cache_clear(tag='users', backend='redis', ttl=300)
+
+
+def test_cache_clear_without_instantiated_backend():
+    """Verify cache_clear creates backend when none exists.
+
+    This tests that cache_clear() properly creates a backend instance when
+    both backend and ttl are specified, even if no cached function has been called.
+
+    This is essential for distributed caches (Redis) where cache_clear may be called
+    from a different process than the one that populated the cache.
+    """
+    from cachu.decorator import _backends, clear_backends
+
+    # Clear all backends to simulate a fresh process
+    clear_backends()
+
+    # Verify no backends exist (simulates import script that hasn't called cached functions)
+    assert len(_backends) == 0
+
+    # Call cache_clear with specific backend and ttl
+    # Before the fix, this would do nothing because no backend existed
+    # After the fix, this should create the backend and attempt to clear it
+    cachu.cache_clear(backend='memory', ttl=999, tag='test_tag')
+
+    # With the fix, a backend should have been created
+    assert len(_backends) == 1
+    key = list(_backends.keys())[0]
+    assert key[1] == 'memory'  # backend type
+    assert key[2] == 999  # ttl
+
+
+def test_cache_clear_creates_backend_and_clears(temp_cache_dir):
+    """Verify cache_clear can clear data in file backend without prior instantiation.
+
+    File backend persists data to disk, allowing us to verify that cache_clear
+    can find and delete cached data even when called from a 'fresh' process state.
+    """
+    from cachu.backends import NO_VALUE
+    from cachu.config import _get_caller_package
+    from cachu.decorator import _backends, _get_backend, clear_backends
+
+    package = _get_caller_package()
+
+    # First, create some cached data in a file backend
+    backend = _get_backend(package, 'file', 888)
+    backend.set('14m:test_func||file_tag||x=1', 'test_value', 888)
+    assert backend.get('14m:test_func||file_tag||x=1') == 'test_value'
+
+    # Clear backend instances (but file data persists on disk)
+    clear_backends()
+    assert len(_backends) == 0
+
+    # cache_clear should create a new backend instance and clear the persisted data
+    cleared = cachu.cache_clear(backend='file', ttl=888, tag='file_tag')
+
+    # Backend should have been created
+    assert len(_backends) == 1
+
+    # Data should have been cleared (verify by getting a fresh backend)
+    backend = _get_backend(package, 'file', 888)
+    assert backend.get('14m:test_func||file_tag||x=1') is NO_VALUE
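The literal key in the file-backend test exposes the key layout these assertions depend on: a TTL-derived region prefix, then the function name, tag, and arguments, joined with double pipes. With `ttl=888`, 888 seconds is 14 whole minutes, which yields the `14m:` prefix (the same naming scheme as `_seconds_to_region_name` in the removed cache.py below). A hedged illustration; the helper name here is made up:

```python
def region_name(seconds: int) -> str:
    # Mirrors the TTL-to-prefix naming visible in the test key ('14m').
    if seconds < 60:
        return f'{seconds}s'
    if seconds < 3600:
        return f'{seconds // 60}m'
    if seconds < 86400:
        return f'{seconds // 3600}h'
    return f'{seconds // 86400}d'

ttl = 888
key = f'{region_name(ttl)}:test_func||file_tag||x=1'
assert key == '14m:test_func||file_tag||x=1'
```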
cachu-0.1.1/src/cachu/cache.py DELETED
@@ -1,636 +0,0 @@
The entire module was removed in 0.1.3. Its contents in 0.1.1:

```python
import dbm
import inspect
import logging
import os
import pathlib
import threading
from collections.abc import Callable
from functools import partial, wraps
from typing import Any

from dogpile.cache import CacheRegion, make_region
from dogpile.cache.backends.file import AbstractFileLock
from dogpile.cache.region import DefaultInvalidationStrategy
from dogpile.util.readwrite_lock import ReadWriteMutex

from .config import _get_caller_package, config, get_config, is_disabled

logger = logging.getLogger(__name__)


def _is_connection_like(obj: Any) -> bool:
    """Check if object appears to be a database connection.
    """
    if hasattr(obj, 'driver_connection'):
        return True

    if hasattr(obj, 'dialect'):
        return True

    if hasattr(obj, 'engine'):
        return True

    obj_type = str(type(obj))
    connection_indicators = ('Connection', 'Engine', 'psycopg', 'pyodbc', 'sqlite3')

    return any(indicator in obj_type for indicator in connection_indicators)


def _normalize_namespace(namespace: str) -> str:
    """Normalize namespace to always be wrapped in pipes.
    """
    if not namespace:
        return ''
    namespace = namespace.strip('|')
    namespace = namespace.replace('|', '.')
    return f'|{namespace}|'


def _create_namespace_filter(namespace: str) -> Callable[[str], bool]:
    """Create a filter function for namespace-based key matching.
    """
    debug_prefix = config.debug_key
    normalized_ns = _normalize_namespace(namespace)
    namespace_pattern = f'|{normalized_ns}|'

    def matches_namespace(key: str) -> bool:
        if not key.startswith(debug_prefix):
            return False
        key_after_prefix = key[len(debug_prefix):]
        return namespace_pattern in key_after_prefix

    return matches_namespace


def key_generator(namespace: str, fn: Callable[..., Any], exclude_params: set[str] | None = None) -> Callable[..., str]:
    """Generate a cache key for the given namespace and function.
    """
    exclude_params = exclude_params or set()
    unwrapped_fn = getattr(fn, '__wrapped__', fn)
    namespace = f'{unwrapped_fn.__name__}|{_normalize_namespace(namespace)}' if namespace else f'{unwrapped_fn.__name__}'

    argspec = inspect.getfullargspec(unwrapped_fn)
    _args_reversed = list(reversed(argspec.args or []))
    _defaults_reversed = list(reversed(argspec.defaults or []))
    args_with_defaults = {_args_reversed[i]: default for i, default in enumerate(_defaults_reversed)}

    def generate_key(*args, **kwargs) -> str:
        args, vargs = args[:len(argspec.args)], args[len(argspec.args):]
        as_kwargs = dict(**args_with_defaults)
        as_kwargs.update(dict(zip(argspec.args, args)))
        as_kwargs.update({f'vararg{i+1}': varg for i, varg in enumerate(vargs)})
        as_kwargs.update(**kwargs)
        as_kwargs = {k: v for k, v in as_kwargs.items() if not _is_connection_like(v) and k not in {'self', 'cls'}}
        as_kwargs = {k: v for k, v in as_kwargs.items() if not k.startswith('_') and k not in exclude_params}
        as_str = ' '.join(f'{str(k)}={repr(v)}' for k, v in sorted(as_kwargs.items()))
        return f'{namespace}|{as_str}'

    return generate_key


def key_mangler_default(key: str) -> str:
    """Modify the key for debugging purposes by prefixing it with a debug marker.
    """
    return f'{config.debug_key}{key}'


def key_mangler_region(key: str, region: str) -> str:
    """Modify the key for a specific region for debugging purposes.
    """
    return f'{region}:{config.debug_key}{key}'


def _make_key_mangler(debug_key: str) -> Callable[[str], str]:
    """Create a key mangler with a captured debug_key.
    """
    def mangler(key: str) -> str:
        return f'{debug_key}{key}'
    return mangler


def _make_region_key_mangler(debug_key: str, region_name: str) -> Callable[[str], str]:
    """Create a region key mangler with captured debug_key and region name.
    """
    def mangler(key: str) -> str:
        return f'{region_name}:{debug_key}{key}'
    return mangler


def should_cache_fn(value: Any) -> bool:
    """Determine if the given value should be cached.
    """
    return bool(value)


def _seconds_to_region_name(seconds: int) -> str:
    """Convert seconds to a human-readable region name.
    """
    if seconds < 60:
        return f'{seconds}s'
    elif seconds < 3600:
        return f'{seconds // 60}m'
    elif seconds < 86400:
        return f'{seconds // 3600}h'
    else:
        return f'{seconds // 86400}d'


def get_redis_client(namespace: str | None = None) -> Any:
    """Create a Redis client directly from config.
    """
    try:
        import redis
    except ImportError as e:
        raise RuntimeError(
            "Redis support requires the 'redis' package. Install with: pip install redis"
        ) from e
    if namespace is None:
        namespace = _get_caller_package()
    cfg = get_config(namespace)
    connection_kwargs = {}
    if cfg.redis_ssl:
        connection_kwargs['ssl'] = True
    return redis.Redis(
        host=cfg.redis_host,
        port=cfg.redis_port,
        db=cfg.redis_db,
        **connection_kwargs
    )


class CacheRegionWrapper:
    """Wrapper for CacheRegion that adds exclude_params support.
    """

    def __init__(self, region: CacheRegion) -> None:
        self._region = region
        self._original_cache_on_arguments = region.cache_on_arguments

    def cache_on_arguments(
        self,
        namespace: str = '',
        should_cache_fn: Callable[[Any], bool] = should_cache_fn,
        exclude_params: set[str] | None = None,
        **kwargs) -> Callable:
        """Cache function results based on arguments with optional parameter exclusion.
        """
        if exclude_params:
            custom_key_gen = partial(key_generator, exclude_params=exclude_params)
            cache_decorator = self._original_cache_on_arguments(
                namespace=namespace,
                should_cache_fn=should_cache_fn,
                function_key_generator=custom_key_gen,
                **kwargs
            )
        else:
            cache_decorator = self._original_cache_on_arguments(
                namespace=namespace,
                should_cache_fn=should_cache_fn,
                **kwargs
            )

        def decorator(fn: Callable) -> Callable:
            cached_fn = cache_decorator(fn)

            @wraps(fn)
            def wrapper(*args, **kw):
                if is_disabled():
                    return fn(*args, **kw)
                return cached_fn(*args, **kw)
            return wrapper
        return decorator

    def __getattr__(self, name: str) -> Any:
        """Delegate all other attributes to the wrapped region.
        """
        return getattr(self._region, name)


def _wrap_cache_on_arguments(region: CacheRegion) -> CacheRegionWrapper:
    """Wrap CacheRegion to add exclude_params support with proper IDE typing.
    """
    return CacheRegionWrapper(region)


class CustomFileLock(AbstractFileLock):
    """Implementation of a file lock using a read-write mutex.
    """

    def __init__(self, filename: str) -> None:
        self.mutex = ReadWriteMutex()

    def acquire_read_lock(self, wait: bool) -> bool:
        """Acquire the read lock.
        """
        ret = self.mutex.acquire_read_lock(wait)
        return wait or ret

    def acquire_write_lock(self, wait: bool) -> bool:
        """Acquire the write lock.
        """
        ret = self.mutex.acquire_write_lock(wait)
        return wait or ret

    def release_read_lock(self) -> bool:
        """Release the read lock.
        """
        return self.mutex.release_read_lock()

    def release_write_lock(self) -> bool:
        """Release the write lock.
        """
        return self.mutex.release_write_lock()


class RedisInvalidator(DefaultInvalidationStrategy):
    """Redis invalidation strategy with optional key deletion.
    """

    def __init__(self, region: CacheRegion, delete_keys: bool = False) -> None:
        """Initialize the RedisInvalidator for a given CacheRegion.
        """
        self.region = region
        self.delete_keys = delete_keys
        super().__init__()

    def invalidate(self, hard: bool = True) -> None:
        """Invalidate the cache region using timestamp-based invalidation.
        """
        super().invalidate(hard)
        if self.delete_keys:
            self._delete_backend_keys()

    def _delete_backend_keys(self) -> None:
        """Delete keys from Redis backend for this region.
        """
        try:
            client = self.region.backend.writer_client
            region_prefix = f'{self.region.name}:'
            deleted_count = 0
            for key in client.scan_iter(match=f'{region_prefix}*'):
                client.delete(key)
                deleted_count += 1
            logger.debug(f'Deleted {deleted_count} Redis keys for region "{self.region.name}"')
        except Exception as e:
            logger.warning(f'Failed to delete Redis keys for region "{self.region.name}": {e}')


def _handle_all_regions(regions_dict: dict[tuple[str | None, int], CacheRegionWrapper], log_level: str = 'warning') -> Callable:
    """Decorator to handle clearing all cache regions when seconds=None.
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(
            seconds: int | None = None,
            namespace: str | None = None,
            *,
            package: str | None = None,
        ) -> None:
            resolved_ns = package if package is not None else _get_caller_package()
            if seconds is None:
                regions_to_clear = [
                    (ns, secs) for (ns, secs) in regions_dict
                    if ns == resolved_ns
                ]
                if not regions_to_clear:
                    log_func = getattr(logger, log_level)
                    cache_type = func.__name__.replace('clear_', '').replace('cache', ' cache')
                    log_func(f'No{cache_type} regions exist for namespace "{resolved_ns}"')
                    return
                for _, region_seconds in regions_to_clear:
                    func(region_seconds, namespace, _resolved_namespace=resolved_ns)
                return
            return func(seconds, namespace, _resolved_namespace=resolved_ns)
        return wrapper
    return decorator


_region_lock = threading.Lock()
_memory_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}


def memorycache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
    """Create or retrieve a memory cache region with a specified expiration time.
    """
    with _region_lock:
        namespace = package if package is not None else _get_caller_package()
        cfg = get_config(namespace)
        key = (namespace, seconds)

        if key not in _memory_cache_regions:
            region = make_region(
                function_key_generator=key_generator,
                key_mangler=_make_key_mangler(cfg.debug_key),
            ).configure(
                cfg.memory,
                expiration_time=seconds,
            )
            _memory_cache_regions[key] = _wrap_cache_on_arguments(region)
            logger.debug(f"Created memory cache region for namespace '{namespace}', {seconds}s TTL")
        return _memory_cache_regions[key]


_file_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}


def filecache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
    """Create or retrieve a file cache region with a specified expiration time.
    """
    with _region_lock:
        namespace = package if package is not None else _get_caller_package()
        cfg = get_config(namespace)
        key = (namespace, seconds)

        if seconds < 60:
            filename = f'cache{seconds}sec'
        elif seconds < 3600:
            filename = f'cache{seconds // 60}min'
        else:
            filename = f'cache{seconds // 3600}hour'

        if namespace:
            filename = f'{namespace}_{filename}'

        if key not in _file_cache_regions:
            if cfg.file == 'dogpile.cache.null':
                logger.debug(
                    f"filecache() called from '{namespace}' with null backend - "
                    f"caching disabled for this region."
                )
                name = _seconds_to_region_name(seconds)
                region = make_region(name=name, function_key_generator=key_generator,
                                     key_mangler=_make_key_mangler(cfg.debug_key))
                region.configure('dogpile.cache.null')
            else:
                region = make_region(
                    function_key_generator=key_generator,
                    key_mangler=_make_key_mangler(cfg.debug_key),
                ).configure(
                    cfg.file,
                    expiration_time=seconds,
                    arguments={
                        'filename': os.path.join(cfg.tmpdir, filename),
                        'lock_factory': CustomFileLock
                    }
                )
            logger.debug(f"Created file cache region for namespace '{namespace}', {seconds}s TTL")
            _file_cache_regions[key] = _wrap_cache_on_arguments(region)
        return _file_cache_regions[key]


_redis_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}


def rediscache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
    """Create or retrieve a Redis cache region with a specified expiration time.
    """
    with _region_lock:
        namespace = package if package is not None else _get_caller_package()
        cfg = get_config(namespace)
        key = (namespace, seconds)

        if key not in _redis_cache_regions:
            name = _seconds_to_region_name(seconds)
            region = make_region(name=name, function_key_generator=key_generator,
                                 key_mangler=_make_region_key_mangler(cfg.debug_key, name))

            if cfg.redis == 'dogpile.cache.null':
                logger.debug(
                    f"rediscache() called from '{namespace}' with null backend - "
                    f"caching disabled for this region."
                )
                region.configure('dogpile.cache.null')
            else:
                connection_kwargs = {}
                if cfg.redis_ssl:
                    connection_kwargs['ssl'] = True

                region.configure(
                    cfg.redis,
                    arguments={
                        'host': cfg.redis_host,
                        'port': cfg.redis_port,
                        'db': cfg.redis_db,
                        'redis_expiration_time': seconds,
                        'distributed_lock': cfg.redis_distributed,
                        'thread_local_lock': not cfg.redis_distributed,
                        'connection_kwargs': connection_kwargs,
                    },
                    region_invalidator=RedisInvalidator(region)
                )
            logger.debug(f"Created redis cache region for namespace '{namespace}', {seconds}s TTL")
            _redis_cache_regions[key] = _wrap_cache_on_arguments(region)
        return _redis_cache_regions[key]


@_handle_all_regions(_memory_cache_regions)
def clear_memorycache(
    seconds: int | None = None,
    namespace: str | None = None,
    *,
    _resolved_namespace: str | None = None,
) -> None:
    """Clear a memory cache region.
    """
    pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
    region_key = (pkg, seconds)

    if region_key not in _memory_cache_regions:
        logger.warning(f'No memory cache region exists for namespace "{pkg}", {seconds} seconds')
        return

    cache_dict = _memory_cache_regions[region_key].actual_backend._cache

    if namespace is None:
        cache_dict.clear()
        logger.debug(f'Cleared all memory cache keys for namespace "{pkg}", {seconds} second region')
    else:
        matches_namespace = _create_namespace_filter(namespace)
        keys_to_delete = [key for key in list(cache_dict.keys()) if matches_namespace(key)]
        for key in keys_to_delete:
            del cache_dict[key]
        logger.debug(f'Cleared {len(keys_to_delete)} memory cache keys for namespace "{namespace}"')


@_handle_all_regions(_file_cache_regions)
def clear_filecache(
    seconds: int | None = None,
    namespace: str | None = None,
    *,
    _resolved_namespace: str | None = None,
) -> None:
    """Clear a file cache region.
    """
    pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
    cfg = get_config(pkg)
    region_key = (pkg, seconds)

    if region_key not in _file_cache_regions:
        logger.warning(f'No file cache region exists for namespace "{pkg}", {seconds} seconds')
        return

    filename = _file_cache_regions[region_key].actual_backend.filename
    basename = pathlib.Path(filename).name
    filepath = os.path.join(cfg.tmpdir, basename)

    if namespace is None:
        with dbm.open(filepath, 'n'):
            pass
        logger.debug(f'Cleared all file cache keys for namespace "{pkg}", {seconds} second region')
    else:
        matches_namespace = _create_namespace_filter(namespace)
        with dbm.open(filepath, 'w') as db:
            keys_to_delete = [
                key for key in list(db.keys())
                if matches_namespace(key.decode())
            ]
            for key in keys_to_delete:
                del db[key]
        logger.debug(f'Cleared {len(keys_to_delete)} file cache keys for namespace "{namespace}"')


@_handle_all_regions(_redis_cache_regions)
def clear_rediscache(
    seconds: int | None = None,
    namespace: str | None = None,
    *,
    _resolved_namespace: str | None = None,
) -> None:
    """Clear a redis cache region.
    """
    pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
    cfg = get_config(pkg)
    client = get_redis_client(pkg)

    try:
        region_name = _seconds_to_region_name(seconds)
        region_prefix = f'{region_name}:{cfg.debug_key}'
        deleted_count = 0

        if namespace is None:
            for key in client.scan_iter(match=f'{region_prefix}*'):
                client.delete(key)
                deleted_count += 1
            logger.debug(f'Cleared {deleted_count} Redis keys for region "{region_name}"')
        else:
            matches_namespace = _create_namespace_filter(namespace)
            for key in client.scan_iter(match=f'{region_prefix}*'):
                key_str = key.decode()
                key_without_region = key_str[len(region_name) + 1:]
                if matches_namespace(key_without_region):
                    client.delete(key)
                    deleted_count += 1
            logger.debug(f'Cleared {deleted_count} Redis keys for namespace "{namespace}" in region "{region_name}"')
    finally:
        client.close()


def set_memorycache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
    """Set a specific cached entry in memory cache.
    """
    region = memorycache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.set(cache_key, value)
    logger.debug(f'Set memory cache key for {fn.__name__} in namespace "{namespace}"')


def delete_memorycache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from memory cache.
    """
    region = memorycache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.delete(cache_key)
    logger.debug(f'Deleted memory cache key for {fn.__name__} in namespace "{namespace}"')


def set_filecache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
    """Set a specific cached entry in file cache.
    """
    region = filecache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.set(cache_key, value)
    logger.debug(f'Set file cache key for {fn.__name__} in namespace "{namespace}"')


def delete_filecache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from file cache.
    """
    region = filecache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.delete(cache_key)
    logger.debug(f'Deleted file cache key for {fn.__name__} in namespace "{namespace}"')


def set_rediscache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
    """Set a specific cached entry in redis cache.
    """
    region = rediscache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.set(cache_key, value)
    logger.debug(f'Set redis cache key for {fn.__name__} in namespace "{namespace}"')


def delete_rediscache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from redis cache.
    """
    region = rediscache(seconds)
    cache_key = key_generator(namespace, fn)(**kwargs)
    region.delete(cache_key)
    logger.debug(f'Deleted redis cache key for {fn.__name__} in namespace "{namespace}"')


_BACKEND_MAP = {
    'memory': (memorycache, clear_memorycache, set_memorycache_key, delete_memorycache_key),
    'redis': (rediscache, clear_rediscache, set_rediscache_key, delete_rediscache_key),
    'file': (filecache, clear_filecache, set_filecache_key, delete_filecache_key),
}


def defaultcache(seconds: int) -> CacheRegionWrapper:
    """Return cache region based on configured default backend.
    """
    backend = config.default_backend
    if backend not in _BACKEND_MAP:
        raise ValueError(f'Unknown default_backend: {backend}. Must be one of: {list(_BACKEND_MAP.keys())}')
    return _BACKEND_MAP[backend][0](seconds)


def clear_defaultcache(seconds: int | None = None, namespace: str | None = None) -> None:
    """Clear the default cache region.
    """
    return _BACKEND_MAP[config.default_backend][1](seconds, namespace)


def set_defaultcache_key(seconds: int, namespace: str, fn: Callable[..., Any],
                         value: Any, **kwargs) -> None:
    """Set a specific cached entry in default cache.
    """
    return _BACKEND_MAP[config.default_backend][2](seconds, namespace, fn, value, **kwargs)


def delete_defaultcache_key(seconds: int, namespace: str,
                            fn: Callable[..., Any], **kwargs) -> None:
    """Delete a specific cached entry from default cache.
    """
    return _BACKEND_MAP[config.default_backend][3](seconds, namespace, fn, **kwargs)


def clear_cache_for_namespace(
    namespace: str,
    backend: str | None = None,
    seconds: int | None = None,
) -> None:
    """Clear cache regions for a specific namespace (cross-module safe).
    """
    backends = [backend] if backend else ['memory', 'file', 'redis']
    for b in backends:
        if b == 'memory':
            clear_memorycache(seconds=seconds, package=namespace)
        elif b == 'file':
            clear_filecache(seconds=seconds, package=namespace)
        elif b == 'redis':
            clear_rediscache(seconds=seconds, package=namespace)


if __name__ == '__main__':
    __import__('doctest').testmod(optionflags=4 | 8 | 32)
```
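For reference, the removed `key_generator` built keys from the function name plus sorted `name=repr(value)` pairs, after filling in defaults and dropping `self`/`cls`, underscore-prefixed, excluded, and connection-like arguments. A quick illustration of what it produced (the function here is hypothetical):

```python
def lookup(a, b=2, _trace=False):
    return a + b

gen = key_generator('', lookup)  # key_generator from the removed module above

# Positional args are mapped to names, defaults are filled in, and the
# underscore-prefixed _trace parameter is dropped from the key.
print(gen(1))        # lookup|a=1 b=2
print(gen(1, b=5))   # lookup|a=1 b=5
```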
All other files listed above are unchanged between 0.1.1 and 0.1.3.