cachu 0.2.3__py3-none-any.whl → 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachu/decorator.py CHANGED
@@ -1,68 +1,230 @@
- """Cache decorator implementation.
+ """Cache decorator implementation with sync and async support.
  """
+ import asyncio
  import logging
  import os
  import threading
  import time
- from collections.abc import Callable
+ from collections.abc import Awaitable, Callable
  from functools import wraps
  from typing import Any

- from .backends import NO_VALUE, Backend
+ from .backends import NO_VALUE, AsyncBackend, Backend
  from .backends.file import FileBackend
- from .backends.memory import MemoryBackend
+ from .backends.memory import AsyncMemoryBackend, MemoryBackend
  from .config import _get_caller_package, get_config, is_disabled
  from .keys import make_key_generator, mangle_key
  from .types import CacheEntry, CacheInfo, CacheMeta

  logger = logging.getLogger(__name__)

- _backends: dict[tuple[str | None, str, int], Backend] = {}
- _backends_lock = threading.Lock()

- _stats: dict[int, tuple[int, int]] = {}
- _stats_lock = threading.Lock()
+ class CacheManager:
+     """Manages sync cache backends and statistics.
+     """

+     def __init__(self) -> None:
+         self.backends: dict[tuple[str | None, str, int], Backend] = {}
+         self.stats: dict[int, tuple[int, int]] = {}
+         self._backends_lock = threading.Lock()
+         self._stats_lock = threading.Lock()
+
+     def get_backend(self, package: str | None, backend_type: str, ttl: int) -> Backend:
+         """Get or create a backend instance.
+         """
+         key = (package, backend_type, ttl)
+
+         with self._backends_lock:
+             if key in self.backends:
+                 return self.backends[key]
+
+             cfg = get_config(package)
+
+             if backend_type == 'memory':
+                 backend = MemoryBackend()
+             elif backend_type == 'file':
+                 if ttl < 60:
+                     filename = f'cache{ttl}sec.db'
+                 elif ttl < 3600:
+                     filename = f'cache{ttl // 60}min.db'
+                 else:
+                     filename = f'cache{ttl // 3600}hour.db'
+
+                 if package:
+                     filename = f'{package}_{filename}'
+
+                 filepath = os.path.join(cfg.file_dir, filename)
+                 backend = FileBackend(filepath)
+             elif backend_type == 'redis':
+                 from .backends.redis import RedisBackend
+                 backend = RedisBackend(cfg.redis_url)
+             else:
+                 raise ValueError(f'Unknown backend type: {backend_type}')
+
+             self.backends[key] = backend
+             logger.debug(f"Created {backend_type} backend for package '{package}', {ttl}s TTL")
+             return backend
+
+     def record_hit(self, fn: Callable[..., Any]) -> None:
+         """Record a cache hit for the function.
+         """
+         fn_id = id(fn)
+         with self._stats_lock:
+             hits, misses = self.stats.get(fn_id, (0, 0))
+             self.stats[fn_id] = (hits + 1, misses)
+
+     def record_miss(self, fn: Callable[..., Any]) -> None:
+         """Record a cache miss for the function.
+         """
+         fn_id = id(fn)
+         with self._stats_lock:
+             hits, misses = self.stats.get(fn_id, (0, 0))
+             self.stats[fn_id] = (hits, misses + 1)
+
+     def get_stats(self, fn: Callable[..., Any]) -> tuple[int, int]:
+         """Get (hits, misses) for a function.
+         """
+         fn_id = id(fn)
+         with self._stats_lock:
+             return self.stats.get(fn_id, (0, 0))
+
+     def clear(self, package: str | None = None) -> None:
+         """Clear backend instances for a package.
+         """
+         with self._backends_lock:
+             if package is None:
+                 self.backends.clear()
+             else:
+                 keys_to_delete = [k for k in self.backends if k[0] == package]
+                 for key in keys_to_delete:
+                     del self.backends[key]

- def _get_backend(package: str | None, backend_type: str, ttl: int) -> Backend:
-     """Get or create a backend instance.
+
+ class AsyncCacheManager:
+     """Manages async cache backends and statistics.
      """
-     key = (package, backend_type, ttl)

-     with _backends_lock:
-         if key in _backends:
-             return _backends[key]
+     def __init__(self) -> None:
+         self.backends: dict[tuple[str | None, str, int], AsyncBackend] = {}
+         self.stats: dict[int, tuple[int, int]] = {}
+         self._backends_lock = asyncio.Lock()
+         self._stats_lock = asyncio.Lock()
+
+     async def get_backend(
+         self,
+         package: str | None,
+         backend_type: str,
+         ttl: int,
+     ) -> AsyncBackend:
+         """Get or create an async backend instance.
+         """
+         key = (package, backend_type, ttl)
+
+         async with self._backends_lock:
+             if key in self.backends:
+                 return self.backends[key]
+
+             cfg = get_config(package)
+
+             if backend_type == 'memory':
+                 backend: AsyncBackend = AsyncMemoryBackend()
+             elif backend_type == 'file':
+                 from .backends.sqlite import AsyncSqliteBackend
+
+                 if ttl < 60:
+                     filename = f'cache{ttl}sec.db'
+                 elif ttl < 3600:
+                     filename = f'cache{ttl // 60}min.db'
+                 else:
+                     filename = f'cache{ttl // 3600}hour.db'
+
+                 if package:
+                     filename = f'{package}_{filename}'
+
+                 filepath = os.path.join(cfg.file_dir, filename)
+                 backend = AsyncSqliteBackend(filepath)
+             elif backend_type == 'redis':
+                 from .backends.redis import AsyncRedisBackend
+                 backend = AsyncRedisBackend(cfg.redis_url)
+             else:
+                 raise ValueError(f'Unknown backend type: {backend_type}')
+
+             self.backends[key] = backend
+             logger.debug(f"Created async {backend_type} backend for package '{package}', {ttl}s TTL")
+             return backend
+
+     async def record_hit(self, fn: Callable[..., Any]) -> None:
+         """Record a cache hit for the async function.
+         """
+         fn_id = id(fn)
+         async with self._stats_lock:
+             hits, misses = self.stats.get(fn_id, (0, 0))
+             self.stats[fn_id] = (hits + 1, misses)
+
+     async def record_miss(self, fn: Callable[..., Any]) -> None:
+         """Record a cache miss for the async function.
+         """
+         fn_id = id(fn)
+         async with self._stats_lock:
+             hits, misses = self.stats.get(fn_id, (0, 0))
+             self.stats[fn_id] = (hits, misses + 1)
+
+     async def get_stats(self, fn: Callable[..., Any]) -> tuple[int, int]:
+         """Get (hits, misses) for a function.
+         """
+         fn_id = id(fn)
+         async with self._stats_lock:
+             return self.stats.get(fn_id, (0, 0))
+
+     async def clear(self, package: str | None = None) -> None:
+         """Clear backend instances, calling close() on each.
+         """
+         async with self._backends_lock:
+             if package is None:
+                 for backend in self.backends.values():
+                     await backend.close()
+                 self.backends.clear()
+             else:
+                 keys_to_delete = [k for k in self.backends if k[0] == package]
+                 for key in keys_to_delete:
+                     await self.backends[key].close()
+                     del self.backends[key]

-         cfg = get_config(package)

-         if backend_type == 'memory':
-             backend = MemoryBackend()
-         elif backend_type == 'file':
-             if ttl < 60:
-                 filename = f'cache{ttl}sec.db'
-             elif ttl < 3600:
-                 filename = f'cache{ttl // 60}min.db'
-             else:
-                 filename = f'cache{ttl // 3600}hour.db'
+ manager = CacheManager()
+ async_manager = AsyncCacheManager()

-             if package:
-                 filename = f'{package}_{filename}'

-             filepath = os.path.join(cfg.file_dir, filename)
-             backend = FileBackend(filepath)
-         elif backend_type == 'redis':
-             from .backends.redis import RedisBackend
-             backend = RedisBackend(cfg.redis_url, cfg.redis_distributed)
-         else:
-             raise ValueError(f'Unknown backend type: {backend_type}')
+ def get_backend(
+     backend_type: str | None = None,
+     package: str | None = None,
+     *,
+     ttl: int,
+ ) -> Backend:
+     """Get a backend instance.

-         _backends[key] = backend
-         logger.debug(f"Created {backend_type} backend for package '{package}', {ttl}s TTL")
-         return backend
+     Args:
+         backend_type: 'memory', 'file', or 'redis'. Uses config default if None.
+         package: Package name. Auto-detected if None.
+         ttl: TTL in seconds (used for backend separation).
+     """
+     if package is None:
+         package = _get_caller_package()

+     if backend_type is None:
+         cfg = get_config(package)
+         backend_type = cfg.backend

- def get_backend(backend_type: str | None = None, package: str | None = None, *, ttl: int) -> Backend:
-     """Get a backend instance.
+     return manager.get_backend(package, backend_type, ttl)
+
+
+ async def get_async_backend(
+     backend_type: str | None = None,
+     package: str | None = None,
+     *,
+     ttl: int,
+ ) -> AsyncBackend:
+     """Get an async backend instance.

      Args:
          backend_type: 'memory', 'file', or 'redis'. Uses config default if None.
@@ -76,7 +238,7 @@ def get_backend(backend_type: str | None = None, package: str | None = None, *,
          cfg = get_config(package)
          backend_type = cfg.backend

-     return _get_backend(package, backend_type, ttl)
+     return await async_manager.get_backend(package, backend_type, ttl)


  def cache(
@@ -149,7 +311,7 @@ def cache(
              if is_disabled() or skip_cache:
                  return fn(*args, **kwargs)

-             backend_instance = _get_backend(resolved_package, resolved_backend, ttl)
+             backend_instance = manager.get_backend(resolved_package, resolved_backend, ttl)
              cfg = get_config(resolved_package)

              base_key = key_generator(*args, **kwargs)
@@ -168,13 +330,13 @@ def cache(
                          if not validate(entry):
                              logger.debug(f'Cache validation failed for {fn.__name__}')
                          else:
-                             _record_hit(wrapper)
+                             manager.record_hit(wrapper)
                              return value
                      else:
-                         _record_hit(wrapper)
+                         manager.record_hit(wrapper)
                          return value

-             _record_miss(wrapper)
+             manager.record_miss(wrapper)
              result = fn(*args, **kwargs)

              should_cache = cache_if is None or cache_if(result)
@@ -193,22 +355,122 @@ def cache(
      return decorator


- def _record_hit(fn: Callable[..., Any]) -> None:
-     """Record a cache hit for the function.
-     """
-     fn_id = id(fn)
-     with _stats_lock:
-         hits, misses = _stats.get(fn_id, (0, 0))
-         _stats[fn_id] = (hits + 1, misses)
+ def async_cache(
+     ttl: int = 300,
+     backend: str | None = None,
+     tag: str = '',
+     exclude: set[str] | None = None,
+     cache_if: Callable[[Any], bool] | None = None,
+     validate: Callable[[CacheEntry], bool] | None = None,
+     package: str | None = None,
+ ) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
+     """Async cache decorator with configurable backend and behavior.
+
+     Args:
+         ttl: Time-to-live in seconds (default: 300)
+         backend: Backend type ('memory', 'file', 'redis'). Uses config default if None.
+         tag: Tag for grouping related cache entries
+         exclude: Parameter names to exclude from cache key
+         cache_if: Function to determine if result should be cached.
+             Called with result value, caches if returns True.
+         validate: Function to validate cached entries before returning.
+             Called with CacheEntry, returns False to recompute.
+         package: Package name for config isolation. Auto-detected if None.
+
+     Per-call control via reserved kwargs (not passed to function):
+         _skip_cache: If True, bypass cache completely for this call
+         _overwrite_cache: If True, execute function and overwrite cached value

+     Example:
+         @async_cache(ttl=300, tag='users')
+         async def get_user(user_id: int) -> dict:
+             return await fetch_user(user_id)

- def _record_miss(fn: Callable[..., Any]) -> None:
-     """Record a cache miss for the function.
+         # Normal call
+         user = await get_user(123)
+
+         # Skip cache
+         user = await get_user(123, _skip_cache=True)
+
+         # Force refresh
+         user = await get_user(123, _overwrite_cache=True)
      """
-     fn_id = id(fn)
-     with _stats_lock:
-         hits, misses = _stats.get(fn_id, (0, 0))
-         _stats[fn_id] = (hits, misses + 1)
+     resolved_package = package if package is not None else _get_caller_package()
+
+     if backend is None:
+         cfg = get_config(resolved_package)
+         resolved_backend = cfg.backend
+     else:
+         resolved_backend = backend
+
+     def decorator(fn: Callable[..., Awaitable[Any]]) -> Callable[..., Awaitable[Any]]:
+         key_generator = make_key_generator(fn, tag, exclude)
+
+         meta = CacheMeta(
+             ttl=ttl,
+             backend=resolved_backend,
+             tag=tag,
+             exclude=exclude or set(),
+             cache_if=cache_if,
+             validate=validate,
+             package=resolved_package,
+             key_generator=key_generator,
+         )
+
+         @wraps(fn)
+         async def wrapper(*args: Any, **kwargs: Any) -> Any:
+             skip_cache = kwargs.pop('_skip_cache', False)
+             overwrite_cache = kwargs.pop('_overwrite_cache', False)
+
+             if is_disabled() or skip_cache:
+                 return await fn(*args, **kwargs)
+
+             backend_instance = await async_manager.get_backend(
+                 resolved_package,
+                 resolved_backend,
+                 ttl,
+             )
+             cfg = get_config(resolved_package)
+
+             base_key = key_generator(*args, **kwargs)
+             cache_key = mangle_key(base_key, cfg.key_prefix, ttl)
+
+             if not overwrite_cache:
+                 value, created_at = await backend_instance.get_with_metadata(cache_key)
+
+                 if value is not NO_VALUE:
+                     if validate is not None and created_at is not None:
+                         entry = CacheEntry(
+                             value=value,
+                             created_at=created_at,
+                             age=time.time() - created_at,
+                         )
+                         if not validate(entry):
+                             logger.debug(f'Cache validation failed for {fn.__name__}')
+                         else:
+                             await async_manager.record_hit(wrapper)
+                             return value
+                     else:
+                         await async_manager.record_hit(wrapper)
+                         return value
+
+             await async_manager.record_miss(wrapper)
+             result = await fn(*args, **kwargs)
+
+             should_cache = cache_if is None or cache_if(result)
+
+             if should_cache:
+                 await backend_instance.set(cache_key, result, ttl)
+                 logger.debug(f'Cached {fn.__name__} with key {cache_key}')
+
+             return result
+
+         wrapper._cache_meta = meta  # type: ignore
+         wrapper._cache_key_generator = key_generator  # type: ignore
+
+         return wrapper
+
+     return decorator


  def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
@@ -220,16 +482,13 @@ def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
      Returns
          CacheInfo with hits, misses, and currsize
      """
-     fn_id = id(fn)
-
-     with _stats_lock:
-         hits, misses = _stats.get(fn_id, (0, 0))
+     hits, misses = manager.get_stats(fn)

      meta = getattr(fn, '_cache_meta', None)
      if meta is None:
          return CacheInfo(hits=hits, misses=misses, currsize=0)

-     backend_instance = _get_backend(meta.package, meta.backend, meta.ttl)
+     backend_instance = manager.get_backend(meta.package, meta.backend, meta.ttl)
      cfg = get_config(meta.package)

      fn_name = getattr(fn, '__wrapped__', fn).__name__
@@ -240,13 +499,39 @@ def get_cache_info(fn: Callable[..., Any]) -> CacheInfo:
      return CacheInfo(hits=hits, misses=misses, currsize=currsize)


+ async def get_async_cache_info(fn: Callable[..., Any]) -> CacheInfo:
+     """Get cache statistics for an async decorated function.
+
+     Args:
+         fn: A function decorated with @async_cache
+
+     Returns
+         CacheInfo with hits, misses, and currsize
+     """
+     hits, misses = await async_manager.get_stats(fn)
+
+     meta = getattr(fn, '_cache_meta', None)
+     if meta is None:
+         return CacheInfo(hits=hits, misses=misses, currsize=0)
+
+     backend_instance = await async_manager.get_backend(meta.package, meta.backend, meta.ttl)
+     cfg = get_config(meta.package)
+
+     fn_name = getattr(fn, '__wrapped__', fn).__name__
+     pattern = f'*:{cfg.key_prefix}{fn_name}|*'
+
+     currsize = await backend_instance.count(pattern)
+
+     return CacheInfo(hits=hits, misses=misses, currsize=currsize)
+
+
  def clear_backends(package: str | None = None) -> None:
      """Clear all backend instances for a package. Primarily for testing.
      """
-     with _backends_lock:
-         if package is None:
-             _backends.clear()
-         else:
-             keys_to_delete = [k for k in _backends if k[0] == package]
-             for key in keys_to_delete:
-                 del _backends[key]
+     manager.clear(package)
+
+
+ async def clear_async_backends(package: str | None = None) -> None:
+     """Clear all async backend instances for a package. Primarily for testing.
+     """
+     await async_manager.clear(package)
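
Taken together, decorator.py now exposes an async mirror of the existing sync API: async_cache, get_async_backend, get_async_cache_info, and clear_async_backends, backed by the module-level async_manager. Below is a minimal usage sketch based only on the signatures and docstrings added above; it assumes these names are imported from cachu.decorator and that the configured default backend is the in-memory one, and the user-fetching body is a hypothetical stand-in.

    import asyncio

    from cachu.decorator import async_cache, clear_async_backends, get_async_cache_info


    @async_cache(ttl=300, tag='users')
    async def get_user(user_id: int) -> dict:
        # Hypothetical data source; stands in for any awaitable work.
        return {'id': user_id, 'name': 'example'}


    async def main() -> None:
        await get_user(123)                         # first call: miss, result is stored
        await get_user(123)                         # second call: should be served from the cache
        await get_user(123, _overwrite_cache=True)  # re-executes and overwrites the cached entry
        await get_user(123, _skip_cache=True)       # bypasses the cache entirely

        print(await get_async_cache_info(get_user))  # CacheInfo with hits, misses, currsize

        await clear_async_backends()  # drops async backends, awaiting close() on each


    asyncio.run(main())
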
cachu/keys.py CHANGED
@@ -35,6 +35,14 @@ def _normalize_tag(tag: str) -> str:
      return f'|{tag}|'


+ def _tag_to_pattern(tag: str | None) -> str | None:
+     """Convert tag to cache key pattern for clearing.
+     """
+     if not tag:
+         return None
+     return f'*{_normalize_tag(tag)}*'
+
+
  def make_key_generator(
      fn: Callable[..., Any],
      tag: str = '',
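
The new _tag_to_pattern helper builds the glob used for tag-based clearing out of _normalize_tag, whose visible return wraps the tag in pipe characters. A small illustrative check, assuming _normalize_tag does no further rewriting of a plain alphanumeric tag (the context above suggests this but does not show the full function):

    from cachu.keys import _tag_to_pattern

    # An empty or missing tag yields no pattern, so tag-based clearing can be skipped.
    assert _tag_to_pattern('') is None
    assert _tag_to_pattern(None) is None

    # A non-empty tag becomes a wildcard pattern around the '|tag|' segment
    # that the key generator embeds in cache keys.
    assert _tag_to_pattern('users') == '*|users|*'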