cachu 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachu/__init__.py +27 -0
- cachu/backends/__init__.py +47 -0
- cachu/backends/file.py +158 -0
- cachu/backends/memory.py +102 -0
- cachu/backends/redis.py +131 -0
- cachu/cache.py +636 -0
- cachu/config.py +193 -0
- cachu/decorator.py +257 -0
- cachu/keys.py +122 -0
- cachu/operations.py +174 -0
- cachu/types.py +37 -0
- cachu-0.1.1.dist-info/METADATA +410 -0
- cachu-0.1.1.dist-info/RECORD +15 -0
- cachu-0.1.1.dist-info/WHEEL +5 -0
- cachu-0.1.1.dist-info/top_level.txt +1 -0
cachu/config.py
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
"""Configuration module for cache backends with package isolation.
|
|
2
|
+
|
|
3
|
+
Each calling library gets its own isolated configuration, preventing
|
|
4
|
+
configuration conflicts when multiple libraries use the cachu package.
|
|
5
|
+
"""
|
|
6
|
+
import logging
|
|
7
|
+
import os
|
|
8
|
+
import pathlib
|
|
9
|
+
import sys
|
|
10
|
+
from dataclasses import asdict, dataclass, replace
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
# Module-level kill switch: when True, cache decorators call the wrapped
# function directly and never touch a backend. Toggled via disable()/enable().
_disabled: bool = False
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def disable() -> None:
    """Disable all caching globally.

    Until enable() is called, every cached function runs uncached.
    """
    global _disabled
    _disabled = True
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def enable() -> None:
    """Re-enable caching after disable().

    Clears the global kill switch set by disable().
    """
    global _disabled
    _disabled = False
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def is_disabled() -> bool:
    """Check if caching is globally disabled.

    Returns:
        True when disable() has been called without a matching enable().
    """
    return _disabled
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _get_caller_package() -> str | None:
|
|
39
|
+
"""Get the top-level package name of the caller.
|
|
40
|
+
"""
|
|
41
|
+
frame = sys._getframe(1)
|
|
42
|
+
while frame:
|
|
43
|
+
name = frame.f_globals.get('__name__', '')
|
|
44
|
+
if name and not name.startswith('cachu'):
|
|
45
|
+
pkg = name.split('.')[0]
|
|
46
|
+
if pkg == '__main__' and sys.argv and sys.argv[0]:
|
|
47
|
+
return f'__main__.{pathlib.Path(sys.argv[0]).stem}'
|
|
48
|
+
return pkg
|
|
49
|
+
frame = frame.f_back
|
|
50
|
+
return None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@dataclass
class CacheConfig:
    """Configuration for cache backends.

    Instances are mutated in place by ConfigRegistry.configure(); one
    instance exists per configured package plus a shared default.
    """
    # Default backend type for the package: 'memory', 'file', or 'redis'.
    backend: str = 'memory'
    # Prefix prepended to every cache key (useful for versioning/debugging).
    key_prefix: str = ''
    # Directory where file-backend cache files are written.
    file_dir: str = '/tmp'
    # Connection URL used by the redis backend.
    redis_url: str = 'redis://localhost:6379/0'
    # Whether the redis backend should use distributed locks.
    redis_distributed: bool = False
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class ConfigRegistry:
    """Registry that maintains per-package cache configurations.

    Each library (identified by top-level package name) gets its own
    isolated configuration. This prevents configuration conflicts when
    multiple libraries use the cache package with different settings.
    """

    def __init__(self) -> None:
        # package name -> its private CacheConfig; None is a valid key for
        # callers whose package could not be detected.
        self._configs: dict[str | None, CacheConfig] = {}
        # Template copied for each new package; also the fallback in get_config().
        self._default = CacheConfig()

    def configure(
        self,
        package: str | None = None,
        backend: str | None = None,
        key_prefix: str | None = None,
        file_dir: str | None = None,
        redis_url: str | None = None,
        redis_distributed: bool | None = None,
    ) -> CacheConfig:
        """Configure cache for a specific package.

        Only arguments that are not None are applied; everything else keeps
        its current value. Returns the (mutated) package config.
        """
        if package is None:
            package = _get_caller_package()

        candidate_updates = (
            ('backend', backend),
            ('key_prefix', key_prefix),
            ('file_dir', str(file_dir) if file_dir else None),
            ('redis_url', redis_url),
            ('redis_distributed', redis_distributed),
        )
        updates = {name: value for name, value in candidate_updates if value is not None}

        self._validate_config(updates)

        if package not in self._configs:
            # First configuration for this package: start from a copy of the
            # default so later default changes do not leak in.
            self._configs[package] = replace(self._default)
            logger.debug(f"Created new cache config for package '{package}'")

        cfg = self._configs[package]
        for name, value in updates.items():
            setattr(cfg, name, value)

        logger.debug(f"Configured cache for package '{package}': {updates}")
        return cfg

    def _validate_config(self, kwargs: dict[str, Any]) -> None:
        """Validate configuration values, raising ValueError on bad input."""
        if 'backend' in kwargs:
            backend = kwargs['backend']
            valid_backends = ('memory', 'redis', 'file')
            if backend not in valid_backends:
                raise ValueError(f'backend must be one of {valid_backends}, got {backend!r}')

        if 'file_dir' in kwargs:
            file_dir = kwargs['file_dir']
            if not pathlib.Path(file_dir).is_dir():
                raise ValueError(f'file_dir must be an existing directory, got {file_dir!r}')
            if not os.access(file_dir, os.W_OK):
                raise ValueError(f'file_dir must be writable, got {file_dir!r}')

    def get_config(self, package: str | None = None) -> CacheConfig:
        """Get config for a package, with fallback to default."""
        if package is None:
            package = _get_caller_package()
        return self._configs.get(package, self._default)

    def get_all_packages(self) -> list[str | None]:
        """Return list of configured packages."""
        return [*self._configs]

    def clear(self) -> None:
        """Clear all package configurations. Primarily for testing."""
        self._configs.clear()
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
# Process-wide singleton backing the module-level configure()/get_config().
_registry = ConfigRegistry()
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def configure(
    backend: str | None = None,
    key_prefix: str | None = None,
    file_dir: str | None = None,
    redis_url: str | None = None,
    redis_distributed: bool | None = None,
) -> CacheConfig:
    """Configure cache settings for the caller's package.

    This is the main entry point for configuration. Each calling package
    gets its own isolated configuration.

    Args:
        backend: Default backend type ('memory', 'file', 'redis')
        key_prefix: Prefix for all cache keys (for versioning/debugging)
        file_dir: Directory for file-based caches
        redis_url: Redis connection URL (e.g., 'redis://localhost:6379/0')
        redis_distributed: Use distributed locks for Redis
    """
    normalized_dir = str(file_dir) if file_dir else None
    # No explicit package here: the registry auto-detects the caller's
    # package by walking the stack past cachu frames.
    return _registry.configure(
        backend=backend,
        key_prefix=key_prefix,
        file_dir=normalized_dir,
        redis_url=redis_url,
        redis_distributed=redis_distributed,
    )
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def get_config(package: str | None = None) -> CacheConfig:
    """Get the CacheConfig for a specific package or the caller's package.

    Args:
        package: Package name; auto-detected from the call stack when None.
    """
    # Delegation keeps caller auto-detection inside the registry.
    return _registry.get_config(package)
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def get_all_configs() -> dict[str | None, dict[str, Any]]:
    """Return all package configurations as a dictionary.

    The special key '_default' holds the fallback configuration.
    """
    snapshot: dict[str | None, dict[str, Any]] = {'_default': asdict(_registry._default)}
    snapshot.update({pkg: asdict(cfg) for pkg, cfg in _registry._configs.items()})
    return snapshot
|
cachu/decorator.py
ADDED
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
"""Cache decorator implementation.
|
|
2
|
+
"""
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
import threading
|
|
6
|
+
import time
|
|
7
|
+
from functools import wraps
|
|
8
|
+
from typing import Any
|
|
9
|
+
from collections.abc import Callable
|
|
10
|
+
|
|
11
|
+
from .backends import NO_VALUE, Backend
|
|
12
|
+
from .backends.file import FileBackend
|
|
13
|
+
from .backends.memory import MemoryBackend
|
|
14
|
+
from .config import _get_caller_package, get_config, is_disabled
|
|
15
|
+
from .keys import make_key_generator, mangle_key
|
|
16
|
+
from .types import CacheEntry, CacheInfo, CacheMeta
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
# Cache of backend instances, keyed by (package, backend_type, ttl) so each
# package/backend/TTL combination gets its own isolated backend.
_backends: dict[tuple[str | None, str, int], Backend] = {}
_backends_lock = threading.Lock()  # guards _backends

# Per-wrapper (hits, misses) counters, keyed by id() of the wrapper function.
_stats: dict[int, tuple[int, int]] = {}
_stats_lock = threading.Lock()  # guards _stats
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _get_backend(package: str | None, backend_type: str, ttl: int) -> Backend:
    """Get or create a backend instance.

    Instances are cached per (package, backend_type, ttl) under a lock, so
    repeated lookups return the same backend object.

    Args:
        package: Owning package name (used for config lookup and file naming).
        backend_type: 'memory', 'file', or 'redis'.
        ttl: TTL in seconds; file backends are segregated by TTL bucket.

    Raises:
        ValueError: If backend_type is not recognized.
    """
    key = (package, backend_type, ttl)

    with _backends_lock:
        if key in _backends:
            return _backends[key]

        cfg = get_config(package)

        if backend_type == 'memory':
            backend = MemoryBackend()
        elif backend_type == 'file':
            # Human-readable TTL bucket in the cache filename.
            if ttl < 60:
                filename = f'cache{ttl}sec'
            elif ttl < 3600:
                filename = f'cache{ttl // 60}min'
            else:
                filename = f'cache{ttl // 3600}hour'

            if package:
                # BUG FIX: the original assigned f'{package}_(unknown)', which
                # discarded the TTL-based filename computed above and embedded
                # the literal text '(unknown)' in every package cache path.
                filename = f'{package}_{filename}'

            filepath = os.path.join(cfg.file_dir, filename)
            backend = FileBackend(filepath)
        elif backend_type == 'redis':
            # Imported lazily so the redis dependency is optional.
            from .backends.redis import RedisBackend
            backend = RedisBackend(cfg.redis_url, cfg.redis_distributed)
        else:
            raise ValueError(f'Unknown backend type: {backend_type}')

        _backends[key] = backend
        logger.debug(f"Created {backend_type} backend for package '{package}', {ttl}s TTL")
        return backend
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def get_backend(backend_type: str | None = None, package: str | None = None, ttl: int = 300) -> Backend:
    """Get a backend instance.

    Args:
        backend_type: 'memory', 'file', or 'redis'. Uses config default if None.
        package: Package name. Auto-detected if None.
        ttl: TTL in seconds (used for backend separation).
    """
    resolved_package = package if package is not None else _get_caller_package()
    resolved_type = backend_type if backend_type is not None else get_config(resolved_package).backend
    return _get_backend(resolved_package, resolved_type, ttl)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def cache(
    ttl: int = 300,
    backend: str | None = None,
    tag: str = '',
    exclude: set[str] | None = None,
    cache_if: Callable[[Any], bool] | None = None,
    validate: Callable[[CacheEntry], bool] | None = None,
    package: str | None = None,
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Cache decorator with configurable backend and behavior.

    Args:
        ttl: Time-to-live in seconds (default: 300)
        backend: Backend type ('memory', 'file', 'redis'). Uses config default if None.
        tag: Tag for grouping related cache entries
        exclude: Parameter names to exclude from cache key
        cache_if: Function to determine if result should be cached.
            Called with result value, caches if returns True.
        validate: Function to validate cached entries before returning.
            Called with CacheEntry, returns False to recompute.
        package: Package name for config isolation. Auto-detected if None.

    Per-call control via reserved kwargs (not passed to function):
        _skip_cache: If True, bypass cache completely for this call
        _overwrite_cache: If True, execute function and overwrite cached value

    Example:
        @cache(ttl=300, tag='users')
        def get_user(user_id: int) -> dict:
            return fetch_user(user_id)

        # Normal call
        user = get_user(123)

        # Skip cache
        user = get_user(123, _skip_cache=True)

        # Force refresh
        user = get_user(123, _overwrite_cache=True)
    """
    # Package and backend type are resolved once, at decoration time; later
    # configure() calls change other settings but not this backend choice.
    resolved_package = package if package is not None else _get_caller_package()

    if backend is None:
        cfg = get_config(resolved_package)
        resolved_backend = cfg.backend
    else:
        resolved_backend = backend

    def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
        key_generator = make_key_generator(fn, tag, exclude)

        # Attached to the wrapper below so helpers such as get_cache_info()
        # can locate the right backend and key space later.
        meta = CacheMeta(
            ttl=ttl,
            backend=resolved_backend,
            tag=tag,
            exclude=exclude or set(),
            cache_if=cache_if,
            validate=validate,
            package=resolved_package,
            key_generator=key_generator,
        )

        @wraps(fn)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Reserved control kwargs are popped so they never reach fn.
            skip_cache = kwargs.pop('_skip_cache', False)
            overwrite_cache = kwargs.pop('_overwrite_cache', False)

            if is_disabled() or skip_cache:
                return fn(*args, **kwargs)

            backend_instance = _get_backend(resolved_package, resolved_backend, ttl)
            # key_prefix is re-read per call, so configure() changes apply
            # to subsequent calls.
            cfg = get_config(resolved_package)

            base_key = key_generator(*args, **kwargs)
            cache_key = mangle_key(base_key, cfg.key_prefix, ttl)

            if not overwrite_cache:
                value, created_at = backend_instance.get_with_metadata(cache_key)

                if value is not NO_VALUE:
                    if validate is not None and created_at is not None:
                        entry = CacheEntry(
                            value=value,
                            created_at=created_at,
                            age=time.time() - created_at,
                        )
                        if not validate(entry):
                            # Failed validation is treated like a miss: fall
                            # through and recompute; the fresh result then
                            # overwrites the stale entry below.
                            logger.debug(f'Cache validation failed for {fn.__name__}')
                        else:
                            _record_hit(wrapper)
                            return value
                    else:
                        _record_hit(wrapper)
                        return value

            _record_miss(wrapper)
            result = fn(*args, **kwargs)

            # cache_if=None means "always cache".
            should_cache = cache_if is None or cache_if(result)

            if should_cache:
                backend_instance.set(cache_key, result, ttl)
                logger.debug(f'Cached {fn.__name__} with key {cache_key}')

            return result

        # Exposed for introspection (get_cache_info) and cache operations.
        wrapper._cache_meta = meta  # type: ignore
        wrapper._cache_key_generator = key_generator  # type: ignore

        return wrapper

    return decorator
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def _record_hit(fn: Callable[..., Any]) -> None:
    """Record a cache hit for the function."""
    fn_id = id(fn)
    with _stats_lock:
        prev_hits, prev_misses = _stats.get(fn_id, (0, 0))
        _stats[fn_id] = (prev_hits + 1, prev_misses)
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def _record_miss(fn: Callable[..., Any]) -> None:
    """Record a cache miss for the function."""
    fn_id = id(fn)
    with _stats_lock:
        prev_hits, prev_misses = _stats.get(fn_id, (0, 0))
        _stats[fn_id] = (prev_hits, prev_misses + 1)
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
    """Get cache statistics for a decorated function.

    Args:
        fn: A function decorated with @cache

    Returns:
        CacheInfo with hits, misses, and currsize
    """
    # Stats are recorded against the wrapper itself (see _record_hit /
    # _record_miss), so key directly on id(fn). The previous
    # hasattr(fn, '__wrapped__') if/else assigned fn on both branches and
    # was dead code.
    fn_id = id(fn)

    with _stats_lock:
        hits, misses = _stats.get(fn_id, (0, 0))

    meta = getattr(fn, '_cache_meta', None)
    if meta is None:
        # Not a @cache-decorated function: stats only, no backend to query.
        return CacheInfo(hits=hits, misses=misses, currsize=0)

    backend_instance = _get_backend(meta.package, meta.backend, meta.ttl)
    cfg = get_config(meta.package)

    # Stored keys look like '<region>:<prefix><fn_name>|...'; count every
    # entry belonging to this function.
    fn_name = getattr(fn, '__wrapped__', fn).__name__
    pattern = f'*:{cfg.key_prefix}{fn_name}|*'

    currsize = backend_instance.count(pattern)

    return CacheInfo(hits=hits, misses=misses, currsize=currsize)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def clear_backends(package: str | None = None) -> None:
    """Clear all backend instances for a package. Primarily for testing."""
    with _backends_lock:
        if package is None:
            _backends.clear()
            return
        # Backend cache keys are (package, backend_type, ttl) tuples.
        for cache_key in [k for k in _backends if k[0] == package]:
            del _backends[cache_key]
|
cachu/keys.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
"""Cache key generation and parameter filtering.
|
|
2
|
+
"""
|
|
3
|
+
import inspect
|
|
4
|
+
from collections.abc import Callable
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _is_connection_like(obj: Any) -> bool:
|
|
9
|
+
"""Check if object appears to be a database connection.
|
|
10
|
+
|
|
11
|
+
Detects SQLAlchemy connections, psycopg2, pyodbc, sqlite3, and similar.
|
|
12
|
+
"""
|
|
13
|
+
if hasattr(obj, 'driver_connection'):
|
|
14
|
+
return True
|
|
15
|
+
|
|
16
|
+
if hasattr(obj, 'dialect'):
|
|
17
|
+
return True
|
|
18
|
+
|
|
19
|
+
if hasattr(obj, 'engine'):
|
|
20
|
+
return True
|
|
21
|
+
|
|
22
|
+
obj_type = str(type(obj))
|
|
23
|
+
connection_indicators = ('Connection', 'Engine', 'psycopg', 'pyodbc', 'sqlite3')
|
|
24
|
+
|
|
25
|
+
return any(indicator in obj_type for indicator in connection_indicators)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _normalize_tag(tag: str) -> str:
|
|
29
|
+
"""Normalize tag to always be wrapped in pipes.
|
|
30
|
+
"""
|
|
31
|
+
if not tag:
|
|
32
|
+
return ''
|
|
33
|
+
tag = tag.strip('|')
|
|
34
|
+
tag = tag.replace('|', '.')
|
|
35
|
+
return f'|{tag}|'
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def make_key_generator(
|
|
39
|
+
fn: Callable[..., Any],
|
|
40
|
+
tag: str = '',
|
|
41
|
+
exclude: set[str] | None = None,
|
|
42
|
+
) -> Callable[..., str]:
|
|
43
|
+
"""Create a key generator function for the given function.
|
|
44
|
+
|
|
45
|
+
The generated keys include:
|
|
46
|
+
- Function name
|
|
47
|
+
- Tag (if provided)
|
|
48
|
+
- All parameters except: self, cls, connections, underscore-prefixed, and excluded
|
|
49
|
+
|
|
50
|
+
Args:
|
|
51
|
+
fn: The function to generate keys for
|
|
52
|
+
tag: Optional tag for key grouping
|
|
53
|
+
exclude: Parameter names to exclude from the key
|
|
54
|
+
|
|
55
|
+
Returns
|
|
56
|
+
A function that generates cache keys from arguments
|
|
57
|
+
"""
|
|
58
|
+
exclude = exclude or set()
|
|
59
|
+
unwrapped_fn = getattr(fn, '__wrapped__', fn)
|
|
60
|
+
fn_name = unwrapped_fn.__name__
|
|
61
|
+
|
|
62
|
+
if tag:
|
|
63
|
+
key_prefix = f'{fn_name}|{_normalize_tag(tag)}'
|
|
64
|
+
else:
|
|
65
|
+
key_prefix = fn_name
|
|
66
|
+
|
|
67
|
+
argspec = inspect.getfullargspec(unwrapped_fn)
|
|
68
|
+
args_reversed = list(reversed(argspec.args or []))
|
|
69
|
+
defaults_reversed = list(reversed(argspec.defaults or []))
|
|
70
|
+
args_with_defaults = {args_reversed[i]: default for i, default in enumerate(defaults_reversed)}
|
|
71
|
+
|
|
72
|
+
def generate_key(*args: Any, **kwargs: Any) -> str:
|
|
73
|
+
"""Generate a cache key from function arguments.
|
|
74
|
+
"""
|
|
75
|
+
positional_args = args[:len(argspec.args)]
|
|
76
|
+
varargs = args[len(argspec.args):]
|
|
77
|
+
|
|
78
|
+
as_kwargs = dict(**args_with_defaults)
|
|
79
|
+
as_kwargs.update(dict(zip(argspec.args, positional_args)))
|
|
80
|
+
as_kwargs.update({f'vararg{i+1}': varg for i, varg in enumerate(varargs)})
|
|
81
|
+
as_kwargs.update(**kwargs)
|
|
82
|
+
|
|
83
|
+
filtered = {
|
|
84
|
+
k: v for k, v in as_kwargs.items()
|
|
85
|
+
if k not in {'self', 'cls'}
|
|
86
|
+
and not k.startswith('_')
|
|
87
|
+
and k not in exclude
|
|
88
|
+
and not _is_connection_like(v)
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
params_str = ' '.join(f'{k}={repr(v)}' for k, v in sorted(filtered.items()))
|
|
92
|
+
return f'{key_prefix}|{params_str}'
|
|
93
|
+
|
|
94
|
+
return generate_key
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def mangle_key(key: str, key_prefix: str, ttl: int) -> str:
|
|
98
|
+
"""Apply key mangling with prefix and TTL region.
|
|
99
|
+
|
|
100
|
+
Args:
|
|
101
|
+
key: The base cache key
|
|
102
|
+
key_prefix: Global key prefix from config
|
|
103
|
+
ttl: TTL in seconds (used as region identifier)
|
|
104
|
+
|
|
105
|
+
Returns
|
|
106
|
+
The mangled key
|
|
107
|
+
"""
|
|
108
|
+
region = _seconds_to_region_name(ttl)
|
|
109
|
+
return f'{region}:{key_prefix}{key}'
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _seconds_to_region_name(seconds: int) -> str:
|
|
113
|
+
"""Convert seconds to a human-readable region name.
|
|
114
|
+
"""
|
|
115
|
+
if seconds < 60:
|
|
116
|
+
return f'{seconds}s'
|
|
117
|
+
elif seconds < 3600:
|
|
118
|
+
return f'{seconds // 60}m'
|
|
119
|
+
elif seconds < 86400:
|
|
120
|
+
return f'{seconds // 3600}h'
|
|
121
|
+
else:
|
|
122
|
+
return f'{seconds // 86400}d'
|