cachu 0.1.1__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {cachu-0.1.1 → cachu-0.1.2}/PKG-INFO +14 -8
  2. {cachu-0.1.1 → cachu-0.1.2}/README.md +12 -7
  3. {cachu-0.1.1 → cachu-0.1.2}/pyproject.toml +2 -1
  4. {cachu-0.1.1 → cachu-0.1.2}/setup.cfg +1 -1
  5. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/__init__.py +1 -1
  6. {cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/PKG-INFO +14 -8
  7. {cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/SOURCES.txt +0 -1
  8. {cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/requires.txt +1 -0
  9. cachu-0.1.1/src/cachu/cache.py +0 -636
  10. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/backends/__init__.py +0 -0
  11. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/backends/file.py +0 -0
  12. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/backends/memory.py +0 -0
  13. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/backends/redis.py +0 -0
  14. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/config.py +0 -0
  15. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/decorator.py +0 -0
  16. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/keys.py +0 -0
  17. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/operations.py +0 -0
  18. {cachu-0.1.1 → cachu-0.1.2}/src/cachu/types.py +0 -0
  19. {cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/dependency_links.txt +0 -0
  20. {cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/top_level.txt +0 -0
  21. {cachu-0.1.1 → cachu-0.1.2}/tests/test_clearing.py +0 -0
  22. {cachu-0.1.1 → cachu-0.1.2}/tests/test_config.py +0 -0
  23. {cachu-0.1.1 → cachu-0.1.2}/tests/test_defaultcache.py +0 -0
  24. {cachu-0.1.1 → cachu-0.1.2}/tests/test_delete_keys.py +0 -0
  25. {cachu-0.1.1 → cachu-0.1.2}/tests/test_disable.py +0 -0
  26. {cachu-0.1.1 → cachu-0.1.2}/tests/test_exclude_params.py +0 -0
  27. {cachu-0.1.1 → cachu-0.1.2}/tests/test_file_cache.py +0 -0
  28. {cachu-0.1.1 → cachu-0.1.2}/tests/test_integration.py +0 -0
  29. {cachu-0.1.1 → cachu-0.1.2}/tests/test_memory_cache.py +0 -0
  30. {cachu-0.1.1 → cachu-0.1.2}/tests/test_namespace.py +0 -0
  31. {cachu-0.1.1 → cachu-0.1.2}/tests/test_namespace_isolation.py +0 -0
  32. {cachu-0.1.1 → cachu-0.1.2}/tests/test_redis_cache.py +0 -0
  33. {cachu-0.1.1 → cachu-0.1.2}/tests/test_set_keys.py +0 -0
{cachu-0.1.1 → cachu-0.1.2}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cachu
- Version: 0.1.1
+ Version: 0.1.2
  Summary: Flexible caching library built on dogpile.cache
  Author: bissli
  License-Expression: 0BSD
@@ -14,6 +14,7 @@ Requires-Dist: redis; extra == "redis"
  Provides-Extra: test
  Requires-Dist: pytest; extra == "test"
  Requires-Dist: pytest-mock; extra == "test"
+ Requires-Dist: redis; extra == "test"
  Requires-Dist: testcontainers[redis]; extra == "test"

  # cachu
@@ -25,13 +26,13 @@ Flexible caching library with support for memory, file, and Redis backends.
  **Basic installation:**

  ```bash
- pip install git+https://github.com/bissli/cachu.git
+ pip install cachu
  ```

  **With Redis support:**

  ```bash
- pip install git+https://github.com/bissli/cachu.git#egg=cachu[redis]
+ pip install cachu[redis]
  ```

  ## Quick Start
@@ -107,7 +108,7 @@ all_configs = cachu.get_all_configs() # All configurations
  ### Basic Caching

  ```python
- from cachu import cachu
+ from cachu import cache

  @cache(ttl=300, backend='memory')
  def expensive_operation(param: str) -> dict:
@@ -138,6 +139,8 @@ def fetch_external_data(api_key: str) -> dict:
  Tags organize cache entries into logical groups for selective clearing:

  ```python
+ from cachu import cache, cache_clear
+
  @cache(ttl=300, tag='users')
  def get_user(user_id: int) -> dict:
      return fetch_user(user_id)
@@ -147,7 +150,7 @@ def get_product(product_id: int) -> dict:
      return fetch_product(product_id)

  # Clear only user caches
- cachu.cache_clear(tag='users', backend='memory', ttl=300)
+ cache_clear(tag='users', backend='memory', ttl=300)
  ```

  ### Conditional Caching
@@ -214,12 +217,14 @@ result = get_data(123, _overwrite_cache=True)
  Track hits and misses:

  ```python
+ from cachu import cache, cache_info
+
  @cache(ttl=300)
  def get_user(user_id: int) -> dict:
      return fetch_user(user_id)

  # After some usage
- info = cachu.cache_info(get_user)
+ info = cache_info(get_user)
  print(f"Hits: {info.hits}, Misses: {info.misses}, Size: {info.currsize}")
  ```

@@ -267,7 +272,7 @@ cache_delete(get_user, user_id=123)
  ### Clearing Caches

  ```python
- from cachu import cachu_clear
+ from cachu import cache_clear

  # Clear specific region
  cache_clear(backend='memory', ttl=300)
@@ -302,7 +307,8 @@ def get_data(id: int) -> dict:
      return fetch(id)

  # In tests/conftest.py
- cachu.cache_clear(backend='memory', ttl=300, package='myapp')
+ from cachu import cache_clear
+ cache_clear(backend='memory', ttl=300, package='myapp')
  ```

  ## Instance and Class Methods
{cachu-0.1.1 → cachu-0.1.2}/README.md
@@ -7,13 +7,13 @@ Flexible caching library with support for memory, file, and Redis backends.
  **Basic installation:**

  ```bash
- pip install git+https://github.com/bissli/cachu.git
+ pip install cachu
  ```

  **With Redis support:**

  ```bash
- pip install git+https://github.com/bissli/cachu.git#egg=cachu[redis]
+ pip install cachu[redis]
  ```

  ## Quick Start
@@ -89,7 +89,7 @@ all_configs = cachu.get_all_configs() # All configurations
  ### Basic Caching

  ```python
- from cachu import cachu
+ from cachu import cache

  @cache(ttl=300, backend='memory')
  def expensive_operation(param: str) -> dict:
@@ -120,6 +120,8 @@ def fetch_external_data(api_key: str) -> dict:
  Tags organize cache entries into logical groups for selective clearing:

  ```python
+ from cachu import cache, cache_clear
+
  @cache(ttl=300, tag='users')
  def get_user(user_id: int) -> dict:
      return fetch_user(user_id)
@@ -129,7 +131,7 @@ def get_product(product_id: int) -> dict:
      return fetch_product(product_id)

  # Clear only user caches
- cachu.cache_clear(tag='users', backend='memory', ttl=300)
+ cache_clear(tag='users', backend='memory', ttl=300)
  ```

  ### Conditional Caching
@@ -196,12 +198,14 @@ result = get_data(123, _overwrite_cache=True)
  Track hits and misses:

  ```python
+ from cachu import cache, cache_info
+
  @cache(ttl=300)
  def get_user(user_id: int) -> dict:
      return fetch_user(user_id)

  # After some usage
- info = cachu.cache_info(get_user)
+ info = cache_info(get_user)
  print(f"Hits: {info.hits}, Misses: {info.misses}, Size: {info.currsize}")
  ```

@@ -249,7 +253,7 @@ cache_delete(get_user, user_id=123)
  ### Clearing Caches

  ```python
- from cachu import cachu_clear
+ from cachu import cache_clear

  # Clear specific region
  cache_clear(backend='memory', ttl=300)
@@ -284,7 +288,8 @@ def get_data(id: int) -> dict:
      return fetch(id)

  # In tests/conftest.py
- cachu.cache_clear(backend='memory', ttl=300, package='myapp')
+ from cachu import cache_clear
+ cache_clear(backend='memory', ttl=300, package='myapp')
  ```

  ## Instance and Class Methods
{cachu-0.1.1 → cachu-0.1.2}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "cachu"
- version = "0.1.1"
+ version = "0.1.2"
  description = "Flexible caching library built on dogpile.cache"
  readme = "README.md"
  license = "0BSD"
@@ -16,6 +16,7 @@ redis = ["redis"]
  test = [
      "pytest",
      "pytest-mock",
+     "redis",
      "testcontainers[redis]",
  ]

{cachu-0.1.1 → cachu-0.1.2}/setup.cfg
@@ -1,5 +1,5 @@
  [bumpversion]
- current_version = 0.1.1
+ current_version = 0.1.2
  commit = True
  tag = True

{cachu-0.1.1 → cachu-0.1.2}/src/cachu/__init__.py
@@ -1,6 +1,6 @@
  """Flexible caching library with support for memory, file, and Redis backends.
  """
- __version__ = '0.1.1'
+ __version__ = '0.1.2'

  from .backends.redis import get_redis_client
  from .config import configure, disable, enable, get_all_configs, get_config
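
The only code change in `src/cachu/__init__.py` is the version bump. A quick way to confirm which release is actually importable in an environment (a minimal sketch, assuming a standard install of the published package):

```python
import cachu

# After upgrading, the module-level version string should reflect the new release
print(cachu.__version__)  # expected: '0.1.2'
```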
{cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cachu
- Version: 0.1.1
+ Version: 0.1.2
  Summary: Flexible caching library built on dogpile.cache
  Author: bissli
  License-Expression: 0BSD
@@ -14,6 +14,7 @@ Requires-Dist: redis; extra == "redis"
  Provides-Extra: test
  Requires-Dist: pytest; extra == "test"
  Requires-Dist: pytest-mock; extra == "test"
+ Requires-Dist: redis; extra == "test"
  Requires-Dist: testcontainers[redis]; extra == "test"

  # cachu
@@ -25,13 +26,13 @@ Flexible caching library with support for memory, file, and Redis backends.
  **Basic installation:**

  ```bash
- pip install git+https://github.com/bissli/cachu.git
+ pip install cachu
  ```

  **With Redis support:**

  ```bash
- pip install git+https://github.com/bissli/cachu.git#egg=cachu[redis]
+ pip install cachu[redis]
  ```

  ## Quick Start
@@ -107,7 +108,7 @@ all_configs = cachu.get_all_configs() # All configurations
  ### Basic Caching

  ```python
- from cachu import cachu
+ from cachu import cache

  @cache(ttl=300, backend='memory')
  def expensive_operation(param: str) -> dict:
@@ -138,6 +139,8 @@ def fetch_external_data(api_key: str) -> dict:
  Tags organize cache entries into logical groups for selective clearing:

  ```python
+ from cachu import cache, cache_clear
+
  @cache(ttl=300, tag='users')
  def get_user(user_id: int) -> dict:
      return fetch_user(user_id)
@@ -147,7 +150,7 @@ def get_product(product_id: int) -> dict:
      return fetch_product(product_id)

  # Clear only user caches
- cachu.cache_clear(tag='users', backend='memory', ttl=300)
+ cache_clear(tag='users', backend='memory', ttl=300)
  ```

  ### Conditional Caching
@@ -214,12 +217,14 @@ result = get_data(123, _overwrite_cache=True)
  Track hits and misses:

  ```python
+ from cachu import cache, cache_info
+
  @cache(ttl=300)
  def get_user(user_id: int) -> dict:
      return fetch_user(user_id)

  # After some usage
- info = cachu.cache_info(get_user)
+ info = cache_info(get_user)
  print(f"Hits: {info.hits}, Misses: {info.misses}, Size: {info.currsize}")
  ```

@@ -267,7 +272,7 @@ cache_delete(get_user, user_id=123)
  ### Clearing Caches

  ```python
- from cachu import cachu_clear
+ from cachu import cache_clear

  # Clear specific region
  cache_clear(backend='memory', ttl=300)
@@ -302,7 +307,8 @@ def get_data(id: int) -> dict:
      return fetch(id)

  # In tests/conftest.py
- cachu.cache_clear(backend='memory', ttl=300, package='myapp')
+ from cachu import cache_clear
+ cache_clear(backend='memory', ttl=300, package='myapp')
  ```

  ## Instance and Class Methods
{cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/SOURCES.txt
@@ -2,7 +2,6 @@ README.md
  pyproject.toml
  setup.cfg
  src/cachu/__init__.py
- src/cachu/cache.py
  src/cachu/config.py
  src/cachu/decorator.py
  src/cachu/keys.py
{cachu-0.1.1 → cachu-0.1.2}/src/cachu.egg-info/requires.txt
@@ -7,4 +7,5 @@ redis
  [test]
  pytest
  pytest-mock
+ redis
  testcontainers[redis]
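
With `redis` now listed under the `test` extra, installing the test toolchain pulls the client in directly. Assuming only the extras shown above, a typical invocation would be:

```bash
# Installs pytest, pytest-mock, redis, and testcontainers[redis]
pip install "cachu[test]"
```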
cachu-0.1.1/src/cachu/cache.py (removed)
@@ -1,636 +0,0 @@
- import dbm
- import inspect
- import logging
- import os
- import pathlib
- import threading
- from collections.abc import Callable
- from functools import partial, wraps
- from typing import Any
-
- from dogpile.cache import CacheRegion, make_region
- from dogpile.cache.backends.file import AbstractFileLock
- from dogpile.cache.region import DefaultInvalidationStrategy
- from dogpile.util.readwrite_lock import ReadWriteMutex
-
- from .config import _get_caller_package, config, get_config, is_disabled
-
- logger = logging.getLogger(__name__)
-
-
- def _is_connection_like(obj: Any) -> bool:
-     """Check if object appears to be a database connection.
-     """
-     if hasattr(obj, 'driver_connection'):
-         return True
-
-     if hasattr(obj, 'dialect'):
-         return True
-
-     if hasattr(obj, 'engine'):
-         return True
-
-     obj_type = str(type(obj))
-     connection_indicators = ('Connection', 'Engine', 'psycopg', 'pyodbc', 'sqlite3')
-
-     return any(indicator in obj_type for indicator in connection_indicators)
-
-
- def _normalize_namespace(namespace: str) -> str:
-     """Normalize namespace to always be wrapped in pipes.
-     """
-     if not namespace:
-         return ''
-     namespace = namespace.strip('|')
-     namespace = namespace.replace('|', '.')
-     return f'|{namespace}|'
-
-
- def _create_namespace_filter(namespace: str) -> Callable[[str], bool]:
-     """Create a filter function for namespace-based key matching.
-     """
-     debug_prefix = config.debug_key
-     normalized_ns = _normalize_namespace(namespace)
-     namespace_pattern = f'|{normalized_ns}|'
-
-     def matches_namespace(key: str) -> bool:
-         if not key.startswith(debug_prefix):
-             return False
-         key_after_prefix = key[len(debug_prefix):]
-         return namespace_pattern in key_after_prefix
-
-     return matches_namespace
-
-
- def key_generator(namespace: str, fn: Callable[..., Any], exclude_params: set[str] | None = None) -> Callable[..., str]:
-     """Generate a cache key for the given namespace and function.
-     """
-     exclude_params = exclude_params or set()
-     unwrapped_fn = getattr(fn, '__wrapped__', fn)
-     namespace = f'{unwrapped_fn.__name__}|{_normalize_namespace(namespace)}' if namespace else f'{unwrapped_fn.__name__}'
-
-     argspec = inspect.getfullargspec(unwrapped_fn)
-     _args_reversed = list(reversed(argspec.args or []))
-     _defaults_reversed = list(reversed(argspec.defaults or []))
-     args_with_defaults = { _args_reversed[i]: default for i, default in enumerate(_defaults_reversed)}
-
-     def generate_key(*args, **kwargs) -> str:
-         args, vargs = args[:len(argspec.args)], args[len(argspec.args):]
-         as_kwargs = dict(**args_with_defaults)
-         as_kwargs.update(dict(zip(argspec.args, args)))
-         as_kwargs.update({f'vararg{i+1}': varg for i, varg in enumerate(vargs)})
-         as_kwargs.update(**kwargs)
-         as_kwargs = {k: v for k, v in as_kwargs.items() if not _is_connection_like(v) and k not in {'self', 'cls'}}
-         as_kwargs = {k: v for k, v in as_kwargs.items() if not k.startswith('_') and k not in exclude_params}
-         as_str = ' '.join(f'{str(k)}={repr(v)}' for k, v in sorted(as_kwargs.items()))
-         return f'{namespace}|{as_str}'
-
-     return generate_key
-
-
- def key_mangler_default(key: str) -> str:
-     """Modify the key for debugging purposes by prefixing it with a debug marker.
-     """
-     return f'{config.debug_key}{key}'
-
-
- def key_mangler_region(key: str, region: str) -> str:
-     """Modify the key for a specific region for debugging purposes.
-     """
-     return f'{region}:{config.debug_key}{key}'
-
-
- def _make_key_mangler(debug_key: str) -> Callable[[str], str]:
-     """Create a key mangler with a captured debug_key.
-     """
-     def mangler(key: str) -> str:
-         return f'{debug_key}{key}'
-     return mangler
-
-
- def _make_region_key_mangler(debug_key: str, region_name: str) -> Callable[[str], str]:
-     """Create a region key mangler with captured debug_key and region name.
-     """
-     def mangler(key: str) -> str:
-         return f'{region_name}:{debug_key}{key}'
-     return mangler
-
-
- def should_cache_fn(value: Any) -> bool:
-     """Determine if the given value should be cached.
-     """
-     return bool(value)
-
-
- def _seconds_to_region_name(seconds: int) -> str:
-     """Convert seconds to a human-readable region name.
-     """
-     if seconds < 60:
-         return f'{seconds}s'
-     elif seconds < 3600:
-         return f'{seconds // 60}m'
-     elif seconds < 86400:
-         return f'{seconds // 3600}h'
-     else:
-         return f'{seconds // 86400}d'
-
-
- def get_redis_client(namespace: str | None = None) -> Any:
-     """Create a Redis client directly from config.
-     """
-     try:
-         import redis
-     except ImportError as e:
-         raise RuntimeError(
-             "Redis support requires the 'redis' package. Install with: pip install redis"
-         ) from e
-     if namespace is None:
-         namespace = _get_caller_package()
-     cfg = get_config(namespace)
-     connection_kwargs = {}
-     if cfg.redis_ssl:
-         connection_kwargs['ssl'] = True
-     return redis.Redis(
-         host=cfg.redis_host,
-         port=cfg.redis_port,
-         db=cfg.redis_db,
-         **connection_kwargs
-     )
-
-
- class CacheRegionWrapper:
-     """Wrapper for CacheRegion that adds exclude_params support.
-     """
-
-     def __init__(self, region: CacheRegion) -> None:
-         self._region = region
-         self._original_cache_on_arguments = region.cache_on_arguments
-
-     def cache_on_arguments(
-         self,
-         namespace: str = '',
-         should_cache_fn: Callable[[Any], bool] = should_cache_fn,
-         exclude_params: set[str] | None = None,
-         **kwargs) -> Callable:
-         """Cache function results based on arguments with optional parameter exclusion.
-         """
-         if exclude_params:
-             custom_key_gen = partial(key_generator, exclude_params=exclude_params)
-             cache_decorator = self._original_cache_on_arguments(
-                 namespace=namespace,
-                 should_cache_fn=should_cache_fn,
-                 function_key_generator=custom_key_gen,
-                 **kwargs
-             )
-         else:
-             cache_decorator = self._original_cache_on_arguments(
-                 namespace=namespace,
-                 should_cache_fn=should_cache_fn,
-                 **kwargs
-             )
-
-         def decorator(fn: Callable) -> Callable:
-             cached_fn = cache_decorator(fn)
-
-             @wraps(fn)
-             def wrapper(*args, **kw):
-                 if is_disabled():
-                     return fn(*args, **kw)
-                 return cached_fn(*args, **kw)
-             return wrapper
-         return decorator
-
-     def __getattr__(self, name: str) -> Any:
-         """Delegate all other attributes to the wrapped region.
-         """
-         return getattr(self._region, name)
-
-
- def _wrap_cache_on_arguments(region: CacheRegion) -> CacheRegionWrapper:
-     """Wrap CacheRegion to add exclude_params support with proper IDE typing.
-     """
-     return CacheRegionWrapper(region)
-
-
- class CustomFileLock(AbstractFileLock):
-     """Implementation of a file lock using a read-write mutex.
-     """
-
-     def __init__(self, filename: str) -> None:
-         self.mutex = ReadWriteMutex()
-
-     def acquire_read_lock(self, wait: bool) -> bool:
-         """Acquire the read lock.
-         """
-         ret = self.mutex.acquire_read_lock(wait)
-         return wait or ret
-
-     def acquire_write_lock(self, wait: bool) -> bool:
-         """Acquire the write lock.
-         """
-         ret = self.mutex.acquire_write_lock(wait)
-         return wait or ret
-
-     def release_read_lock(self) -> bool:
-         """Release the read lock.
-         """
-         return self.mutex.release_read_lock()
-
-     def release_write_lock(self) -> bool:
-         """Release the write lock.
-         """
-         return self.mutex.release_write_lock()
-
-
- class RedisInvalidator(DefaultInvalidationStrategy):
-     """Redis invalidation strategy with optional key deletion.
-     """
-
-     def __init__(self, region: CacheRegion, delete_keys: bool = False) -> None:
-         """Initialize the RedisInvalidator for a given CacheRegion.
-         """
-         self.region = region
-         self.delete_keys = delete_keys
-         super().__init__()
-
-     def invalidate(self, hard: bool = True) -> None:
-         """Invalidate the cache region using timestamp-based invalidation.
-         """
-         super().invalidate(hard)
-         if self.delete_keys:
-             self._delete_backend_keys()
-
-     def _delete_backend_keys(self) -> None:
-         """Delete keys from Redis backend for this region.
-         """
-         try:
-             client = self.region.backend.writer_client
-             region_prefix = f'{self.region.name}:'
-             deleted_count = 0
-             for key in client.scan_iter(match=f'{region_prefix}*'):
-                 client.delete(key)
-                 deleted_count += 1
-             logger.debug(f'Deleted {deleted_count} Redis keys for region "{self.region.name}"')
-         except Exception as e:
-             logger.warning(f'Failed to delete Redis keys for region "{self.region.name}": {e}')
-
-
- def _handle_all_regions(regions_dict: dict[tuple[str | None, int], CacheRegionWrapper], log_level: str = 'warning') -> Callable:
-     """Decorator to handle clearing all cache regions when seconds=None.
-     """
-     def decorator(func: Callable) -> Callable:
-         @wraps(func)
-         def wrapper(
-             seconds: int | None = None,
-             namespace: str | None = None,
-             *,
-             package: str | None = None,
-         ) -> None:
-             resolved_ns = package if package is not None else _get_caller_package()
-             if seconds is None:
-                 regions_to_clear = [
-                     (ns, secs) for (ns, secs) in regions_dict
-                     if ns == resolved_ns
-                 ]
-                 if not regions_to_clear:
-                     log_func = getattr(logger, log_level)
-                     cache_type = func.__name__.replace('clear_', '').replace('cache', ' cache')
-                     log_func(f'No{cache_type} regions exist for namespace "{resolved_ns}"')
-                     return
-                 for _, region_seconds in regions_to_clear:
-                     func(region_seconds, namespace, _resolved_namespace=resolved_ns)
-                 return
-             return func(seconds, namespace, _resolved_namespace=resolved_ns)
-         return wrapper
-     return decorator
-
-
- _region_lock = threading.Lock()
- _memory_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}
-
-
- def memorycache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
-     """Create or retrieve a memory cache region with a specified expiration time.
-     """
-     with _region_lock:
-         namespace = package if package is not None else _get_caller_package()
-         cfg = get_config(namespace)
-         key = (namespace, seconds)
-
-         if key not in _memory_cache_regions:
-             region = make_region(
-                 function_key_generator=key_generator,
-                 key_mangler=_make_key_mangler(cfg.debug_key),
-             ).configure(
-                 cfg.memory,
-                 expiration_time=seconds,
-             )
-             _memory_cache_regions[key] = _wrap_cache_on_arguments(region)
-             logger.debug(f"Created memory cache region for namespace '{namespace}', {seconds}s TTL")
-         return _memory_cache_regions[key]
-
-
- _file_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}
-
-
- def filecache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
-     """Create or retrieve a file cache region with a specified expiration time.
-     """
-     with _region_lock:
-         namespace = package if package is not None else _get_caller_package()
-         cfg = get_config(namespace)
-         key = (namespace, seconds)
-
-         if seconds < 60:
-             filename = f'cache{seconds}sec'
-         elif seconds < 3600:
-             filename = f'cache{seconds // 60}min'
-         else:
-             filename = f'cache{seconds // 3600}hour'
-
-         if namespace:
-             filename = f'{namespace}_{filename}'
-
-         if key not in _file_cache_regions:
-             if cfg.file == 'dogpile.cache.null':
-                 logger.debug(
-                     f"filecache() called from '{namespace}' with null backend - "
-                     f"caching disabled for this region."
-                 )
-                 name = _seconds_to_region_name(seconds)
-                 region = make_region(name=name, function_key_generator=key_generator,
-                                      key_mangler=_make_key_mangler(cfg.debug_key))
-                 region.configure('dogpile.cache.null')
-             else:
-                 region = make_region(
-                     function_key_generator=key_generator,
-                     key_mangler=_make_key_mangler(cfg.debug_key),
-                 ).configure(
-                     cfg.file,
-                     expiration_time=seconds,
-                     arguments={
-                         'filename': os.path.join(cfg.tmpdir, filename),
-                         'lock_factory': CustomFileLock
-                     }
-                 )
-             logger.debug(f"Created file cache region for namespace '{namespace}', {seconds}s TTL")
-             _file_cache_regions[key] = _wrap_cache_on_arguments(region)
-         return _file_cache_regions[key]
-
-
- _redis_cache_regions: dict[tuple[str | None, int], CacheRegionWrapper] = {}
-
-
- def rediscache(seconds: int, *, package: str | None = None) -> CacheRegionWrapper:
-     """Create or retrieve a Redis cache region with a specified expiration time.
-     """
-     with _region_lock:
-         namespace = package if package is not None else _get_caller_package()
-         cfg = get_config(namespace)
-         key = (namespace, seconds)
-
-         if key not in _redis_cache_regions:
-             name = _seconds_to_region_name(seconds)
-             region = make_region(name=name, function_key_generator=key_generator,
-                                  key_mangler=_make_region_key_mangler(cfg.debug_key, name))
-
-             if cfg.redis == 'dogpile.cache.null':
-                 logger.debug(
-                     f"rediscache() called from '{namespace}' with null backend - "
-                     f"caching disabled for this region."
-                 )
-                 region.configure('dogpile.cache.null')
-             else:
-                 connection_kwargs = {}
-                 if cfg.redis_ssl:
-                     connection_kwargs['ssl'] = True
-
-                 region.configure(
-                     cfg.redis,
-                     arguments={
-                         'host': cfg.redis_host,
-                         'port': cfg.redis_port,
-                         'db': cfg.redis_db,
-                         'redis_expiration_time': seconds,
-                         'distributed_lock': cfg.redis_distributed,
-                         'thread_local_lock': not cfg.redis_distributed,
-                         'connection_kwargs': connection_kwargs,
-                     },
-                     region_invalidator=RedisInvalidator(region)
-                 )
-             logger.debug(f"Created redis cache region for namespace '{namespace}', {seconds}s TTL")
-             _redis_cache_regions[key] = _wrap_cache_on_arguments(region)
-         return _redis_cache_regions[key]
-
-
- @_handle_all_regions(_memory_cache_regions)
- def clear_memorycache(
-     seconds: int | None = None,
-     namespace: str | None = None,
-     *,
-     _resolved_namespace: str | None = None,
- ) -> None:
-     """Clear a memory cache region.
-     """
-     pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
-     region_key = (pkg, seconds)
-
-     if region_key not in _memory_cache_regions:
-         logger.warning(f'No memory cache region exists for namespace "{pkg}", {seconds} seconds')
-         return
-
-     cache_dict = _memory_cache_regions[region_key].actual_backend._cache
-
-     if namespace is None:
-         cache_dict.clear()
-         logger.debug(f'Cleared all memory cache keys for namespace "{pkg}", {seconds} second region')
-     else:
-         matches_namespace = _create_namespace_filter(namespace)
-         keys_to_delete = [key for key in list(cache_dict.keys()) if matches_namespace(key)]
-         for key in keys_to_delete:
-             del cache_dict[key]
-         logger.debug(f'Cleared {len(keys_to_delete)} memory cache keys for namespace "{namespace}"')
-
-
- @_handle_all_regions(_file_cache_regions)
- def clear_filecache(
-     seconds: int | None = None,
-     namespace: str | None = None,
-     *,
-     _resolved_namespace: str | None = None,
- ) -> None:
-     """Clear a file cache region.
-     """
-     pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
-     cfg = get_config(pkg)
-     region_key = (pkg, seconds)
-
-     if region_key not in _file_cache_regions:
-         logger.warning(f'No file cache region exists for namespace "{pkg}", {seconds} seconds')
-         return
-
-     filename = _file_cache_regions[region_key].actual_backend.filename
-     basename = pathlib.Path(filename).name
-     filepath = os.path.join(cfg.tmpdir, basename)
-
-     if namespace is None:
-         with dbm.open(filepath, 'n'):
-             pass
-         logger.debug(f'Cleared all file cache keys for namespace "{pkg}", {seconds} second region')
-     else:
-         matches_namespace = _create_namespace_filter(namespace)
-         with dbm.open(filepath, 'w') as db:
-             keys_to_delete = [
-                 key for key in list(db.keys())
-                 if matches_namespace(key.decode())
-             ]
-             for key in keys_to_delete:
-                 del db[key]
-         logger.debug(f'Cleared {len(keys_to_delete)} file cache keys for namespace "{namespace}"')
-
-
- @_handle_all_regions(_redis_cache_regions)
- def clear_rediscache(
-     seconds: int | None = None,
-     namespace: str | None = None,
-     *,
-     _resolved_namespace: str | None = None,
- ) -> None:
-     """Clear a redis cache region.
-     """
-     pkg = _resolved_namespace if _resolved_namespace is not None else _get_caller_package()
-     cfg = get_config(pkg)
-     client = get_redis_client(pkg)
-
-     try:
-         region_name = _seconds_to_region_name(seconds)
-         region_prefix = f'{region_name}:{cfg.debug_key}'
-         deleted_count = 0
-
-         if namespace is None:
-             for key in client.scan_iter(match=f'{region_prefix}*'):
-                 client.delete(key)
-                 deleted_count += 1
-             logger.debug(f'Cleared {deleted_count} Redis keys for region "{region_name}"')
-         else:
-             matches_namespace = _create_namespace_filter(namespace)
-             for key in client.scan_iter(match=f'{region_prefix}*'):
-                 key_str = key.decode()
-                 key_without_region = key_str[len(region_name) + 1:]
-                 if matches_namespace(key_without_region):
-                     client.delete(key)
-                     deleted_count += 1
-             logger.debug(f'Cleared {deleted_count} Redis keys for namespace "{namespace}" in region "{region_name}"')
-     finally:
-         client.close()
-
-
- def set_memorycache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
-     """Set a specific cached entry in memory cache.
-     """
-     region = memorycache(seconds)
-     cache_key = key_generator(namespace, fn)(**kwargs)
-     region.set(cache_key, value)
-     logger.debug(f'Set memory cache key for {fn.__name__} in namespace "{namespace}"')
-
-
- def delete_memorycache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
-     """Delete a specific cached entry from memory cache.
-     """
-     region = memorycache(seconds)
-     cache_key = key_generator(namespace, fn)(**kwargs)
-     region.delete(cache_key)
-     logger.debug(f'Deleted memory cache key for {fn.__name__} in namespace "{namespace}"')
-
-
- def set_filecache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
-     """Set a specific cached entry in file cache.
-     """
-     region = filecache(seconds)
-     cache_key = key_generator(namespace, fn)(**kwargs)
-     region.set(cache_key, value)
-     logger.debug(f'Set file cache key for {fn.__name__} in namespace "{namespace}"')
-
-
- def delete_filecache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
-     """Delete a specific cached entry from file cache.
-     """
-     region = filecache(seconds)
-     cache_key = key_generator(namespace, fn)(**kwargs)
-     region.delete(cache_key)
-     logger.debug(f'Deleted file cache key for {fn.__name__} in namespace "{namespace}"')
-
-
- def set_rediscache_key(seconds: int, namespace: str, fn: Callable[..., Any], value: Any, **kwargs) -> None:
-     """Set a specific cached entry in redis cache.
-     """
-     region = rediscache(seconds)
-     cache_key = key_generator(namespace, fn)(**kwargs)
-     region.set(cache_key, value)
-     logger.debug(f'Set redis cache key for {fn.__name__} in namespace "{namespace}"')
-
-
- def delete_rediscache_key(seconds: int, namespace: str, fn: Callable[..., Any], **kwargs) -> None:
-     """Delete a specific cached entry from redis cache.
-     """
-     region = rediscache(seconds)
-     cache_key = key_generator(namespace, fn)(**kwargs)
-     region.delete(cache_key)
-     logger.debug(f'Deleted redis cache key for {fn.__name__} in namespace "{namespace}"')
-
-
- _BACKEND_MAP = {
-     'memory': (memorycache, clear_memorycache, set_memorycache_key, delete_memorycache_key),
-     'redis': (rediscache, clear_rediscache, set_rediscache_key, delete_rediscache_key),
-     'file': (filecache, clear_filecache, set_filecache_key, delete_filecache_key),
- }
-
-
- def defaultcache(seconds: int) -> CacheRegionWrapper:
-     """Return cache region based on configured default backend.
-     """
-     backend = config.default_backend
-     if backend not in _BACKEND_MAP:
-         raise ValueError(f'Unknown default_backend: {backend}. Must be one of: {list(_BACKEND_MAP.keys())}')
-     return _BACKEND_MAP[backend][0](seconds)
-
-
- def clear_defaultcache(seconds: int | None = None, namespace: str | None = None) -> None:
-     """Clear the default cache region.
-     """
-     return _BACKEND_MAP[config.default_backend][1](seconds, namespace)
-
-
- def set_defaultcache_key(seconds: int, namespace: str, fn: Callable[..., Any],
-                          value: Any, **kwargs) -> None:
-     """Set a specific cached entry in default cache.
-     """
-     return _BACKEND_MAP[config.default_backend][2](seconds, namespace, fn, value, **kwargs)
-
-
- def delete_defaultcache_key(seconds: int, namespace: str,
-                             fn: Callable[..., Any], **kwargs) -> None:
-     """Delete a specific cached entry from default cache.
-     """
-     return _BACKEND_MAP[config.default_backend][3](seconds, namespace, fn, **kwargs)
-
-
- def clear_cache_for_namespace(
-     namespace: str,
-     backend: str | None = None,
-     seconds: int | None = None,
- ) -> None:
-     """Clear cache regions for a specific namespace (cross-module safe).
-     """
-     backends = [backend] if backend else ['memory', 'file', 'redis']
-     for b in backends:
-         if b == 'memory':
-             clear_memorycache(seconds=seconds, package=namespace)
-         elif b == 'file':
-             clear_filecache(seconds=seconds, package=namespace)
-         elif b == 'redis':
-             clear_rediscache(seconds=seconds, package=namespace)
-
-
- if __name__ == '__main__':
-     __import__('doctest').testmod(optionflags=4 | 8 | 32)
All remaining files listed above are unchanged between 0.1.1 and 0.1.2.