cachify 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachify/__init__.py ADDED
@@ -0,0 +1,22 @@
1
"""Public API surface of the cachify package."""

from .features.never_die import clear_never_die_registry
from .memory_cache import cache
from .redis import DEFAULT_KEY_PREFIX, get_redis_config, reset_redis_config, setup_redis_config
from .redis_cache import redis_cache
from .types import CacheKwargs

__version__ = "0.1.0"

# Short alias for the Redis-backed cache decorator.
rcache = redis_cache

__all__ = [
    "__version__",
    "cache",
    "rcache",
    "redis_cache",
    "setup_redis_config",
    "get_redis_config",
    "reset_redis_config",
    "DEFAULT_KEY_PREFIX",
    "CacheKwargs",
    "clear_never_die_registry",
]
cachify/cache.py ADDED
@@ -0,0 +1,116 @@
1
+ import functools
2
+ import inspect
3
+ from typing import Any, Callable, cast
4
+
5
+ from cachify.features.never_die import register_never_die_function
6
+ from cachify.types import CacheConfig, CacheKeyFunction, F, Number
7
+ from cachify.utils.arguments import create_cache_key
8
+
9
+
10
def _async_decorator(
    function: F,
    ttl: Number,
    never_die: bool,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
) -> F:
    """Wrap an async *function* with cache lookup / population logic."""

    @functools.wraps(function)
    async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
        # skip_cache=True forces recomputation; the flag never reaches *function*.
        skip_cache = kwargs.pop("skip_cache", False)
        cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)

        # Fast path: serve a hit without acquiring the lock.
        entry = await config.storage.aget(cache_key, skip_cache)
        if entry:
            return entry.result

        async with config.async_lock(cache_key):
            # Double-checked: another task may have filled the cache
            # while we were waiting on the lock.
            entry = await config.storage.aget(cache_key, skip_cache)
            if entry:
                return entry.result

            result = await function(*args, **kwargs)
            # never_die entries are stored without a ttl; the background
            # refresher keeps them fresh instead of letting them expire.
            await config.storage.aset(cache_key, result, None if never_die else ttl)

            if never_die:
                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)

            return result

    return cast(F, async_wrapper)
39
+
40
+
41
def _sync_decorator(
    function: F,
    ttl: Number,
    never_die: bool,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
) -> F:
    """Wrap a sync *function* with cache lookup / population logic."""

    @functools.wraps(function)
    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
        # skip_cache=True forces recomputation; the flag never reaches *function*.
        skip_cache = kwargs.pop("skip_cache", False)
        cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)

        # Fast path: serve a hit without acquiring the lock.
        entry = config.storage.get(cache_key, skip_cache)
        if entry:
            return entry.result

        with config.sync_lock(cache_key):
            # Double-checked: another caller may have filled the cache
            # while we were waiting on the lock.
            entry = config.storage.get(cache_key, skip_cache)
            if entry:
                return entry.result

            result = function(*args, **kwargs)
            # never_die entries are stored without a ttl; the background
            # refresher keeps them fresh instead of letting them expire.
            config.storage.set(cache_key, result, None if never_die else ttl)

            if never_die:
                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)

            return result

    return cast(F, sync_wrapper)
70
+
71
+
72
def base_cache(
    ttl: Number,
    never_die: bool,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
) -> Callable[[F], F]:
    """
    Base cache decorator factory used by both memory and Redis cache implementations.

    Args:
        ttl: Time to live for cached items in seconds
        never_die: If True, the cache will never expire and will be recalculated based on the ttl
        cache_key_func: Custom cache key function, used for more complex cache scenarios
        ignore_fields: Tuple of strings with the function params to ignore when creating the cache key
        config: Cache configuration specifying storage, locks, and never_die registration

    Features:
        - Works for both sync and async functions
        - Only allows one execution at a time per function+args
        - Makes subsequent calls wait for the first call to complete

    Raises:
        ValueError: If both cache_key_func and ignore_fields are supplied.
    """
    # The two key-construction mechanisms are mutually exclusive.
    if cache_key_func and ignore_fields:
        raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")

    def decorator(function: F) -> F:
        # Pick the wrapper flavour matching the decorated function.
        wrap = _async_decorator if inspect.iscoroutinefunction(function) else _sync_decorator
        return wrap(
            function=function,
            ttl=ttl,
            never_die=never_die,
            cache_key_func=cache_key_func,
            ignore_fields=ignore_fields,
            config=config,
        )

    return decorator
@@ -0,0 +1,4 @@
1
import logging

# Library logger. Attaching a NullHandler is the recommended pattern for
# libraries: it silences "no handler" warnings until the host application
# configures logging itself.
logger = logging.getLogger("cachify")
logger.addHandler(logging.NullHandler())
File without changes
@@ -0,0 +1,219 @@
1
import asyncio
import functools
import inspect
import threading
import time
from asyncio import AbstractEventLoop
from concurrent.futures import Future as ConcurrentFuture
from dataclasses import dataclass
from typing import Any, Callable

from cachify.config import logger
from cachify.types import CacheConfig, CacheKeyFunction, Number
from cachify.utils.arguments import create_cache_key

# Lazily-started background refresher thread and the lock guarding all
# module-level never_die state below.
_NEVER_DIE_THREAD: threading.Thread | None = None
_NEVER_DIE_LOCK: threading.Lock = threading.Lock()
# Entries to keep perpetually warm, plus per-key in-flight refresh workers.
_NEVER_DIE_REGISTRY: list["NeverDieCacheEntry"] = []
_NEVER_DIE_CACHE_THREADS: dict[str, threading.Thread] = {}
_NEVER_DIE_CACHE_FUTURES: dict[str, ConcurrentFuture] = {}

# Failed refreshes back off exponentially, capped at _MAX_BACKOFF times the ttl.
_MAX_BACKOFF: int = 10
_BACKOFF_MULTIPLIER: float = 1.25
# How often the refresher thread scans the registry.
_REFRESH_INTERVAL_SECONDS: float = 0.1
24
+
25
+
26
@dataclass
class NeverDieCacheEntry:
    """One registered never_die call: the function, its exact arguments, and refresh state."""

    function: Callable[..., Any]
    ttl: Number
    args: tuple
    kwargs: dict
    cache_key_func: CacheKeyFunction | None
    ignore_fields: tuple[str, ...]
    loop: AbstractEventLoop | None  # event loop for async functions; None for sync
    config: CacheConfig

    def __post_init__(self):
        # Refresh bookkeeping: current backoff factor and the next-refresh deadline.
        self._backoff: float = 1
        self._expires_at: float = time.monotonic() + self.ttl

    @functools.cached_property
    def cache_key(self) -> str:
        # Computed once; identifies this entry in the registry and worker maps.
        return create_cache_key(self.function, self.cache_key_func, self.ignore_fields, self.args, self.kwargs)

    def __eq__(self, other: Any) -> bool:
        # Entries are identified purely by their cache key.
        return isinstance(other, NeverDieCacheEntry) and self.cache_key == other.cache_key

    def __hash__(self) -> int:
        return hash(self.cache_key)

    def is_expired(self) -> bool:
        """True once the refresh deadline has passed."""
        return time.monotonic() > self._expires_at

    def reset(self):
        """Successful refresh: clear backoff and re-arm the ttl window."""
        self._backoff = 1
        self._expires_at = time.monotonic() + self.ttl

    def revive(self):
        """Failed refresh: widen the retry window exponentially, capped."""
        self._backoff = min(self._backoff * _BACKOFF_MULTIPLIER, _MAX_BACKOFF)
        self._expires_at = time.monotonic() + self.ttl * self._backoff
69
+
70
+
71
def _run_sync_function_and_cache(entry: NeverDieCacheEntry):
    """Recompute a sync never_die entry under its lock and store the result.

    On success the entry's backoff/deadline are reset; on failure the entry
    is revived with exponential backoff so it retries later.
    """
    try:
        with entry.config.sync_lock(entry.cache_key):
            result = entry.function(*entry.args, **entry.kwargs)
            # never_die values carry no ttl; the refresher owns their lifetime.
            entry.config.storage.set(entry.cache_key, result, None)
            entry.reset()
    # Was `except BaseException`, which also swallowed KeyboardInterrupt and
    # SystemExit inside the worker thread; Exception is sufficient here.
    except Exception:
        entry.revive()
        logger.debug(
            "Exception caching function with never_die",
            extra={"function": entry.function.__qualname__},
            exc_info=True,
        )
85
+
86
+
87
+ async def _run_async_function_and_cache(entry: NeverDieCacheEntry):
88
+ """Run a function and cache its result"""
89
+ try:
90
+ async with entry.config.async_lock(entry.cache_key):
91
+ result = await entry.function(*entry.args, **entry.kwargs)
92
+ await entry.config.storage.aset(entry.cache_key, result, None)
93
+ entry.reset()
94
+ except BaseException:
95
+ entry.revive()
96
+ logger.debug(
97
+ "Exception caching function with never_die",
98
+ extra={"function": entry.function.__qualname__},
99
+ exc_info=True,
100
+ )
101
+
102
+
103
def _cache_is_being_set(entry: NeverDieCacheEntry) -> bool:
    """Return True while a refresh worker for *entry*'s key is still in flight."""
    if entry.loop:
        future = _NEVER_DIE_CACHE_FUTURES.get(entry.cache_key)
        return future is not None and not future.done()
    thread = _NEVER_DIE_CACHE_THREADS.get(entry.cache_key)
    return thread is not None and thread.is_alive()
107
+
108
+
109
def _clear_dead_futures():
    """Drop completed futures from the in-flight future registry."""
    # Iterate over a snapshot: the dict is mutated during the loop.
    for cache_key, future in list(_NEVER_DIE_CACHE_FUTURES.items()):
        if future.done():
            del _NEVER_DIE_CACHE_FUTURES[cache_key]
114
+
115
+
116
def _clear_dead_threads():
    """Drop finished worker threads from the in-flight thread registry."""
    # Iterate over a snapshot: the dict is mutated during the loop.
    for cache_key, thread in list(_NEVER_DIE_CACHE_THREADS.items()):
        if not thread.is_alive():
            del _NEVER_DIE_CACHE_THREADS[cache_key]
122
+
123
+
124
def _refresh_never_die_caches():
    """Background daemon loop that re-runs expired never_die entries.

    Must never exit: a catch-all per iteration logs and continues, otherwise
    one bookkeeping bug would silently stop refreshing forever (the original
    try/finally had no except clause, so any error killed the thread).
    """
    while True:
        try:
            for entry in list(_NEVER_DIE_REGISTRY):
                if not entry.is_expired():
                    continue

                if _cache_is_being_set(entry):
                    # A refresh for this key is already in flight.
                    continue

                if not entry.loop:  # sync function: refresh in a worker thread
                    thread = threading.Thread(target=_run_sync_function_and_cache, args=(entry,), daemon=True)
                    thread.start()
                    _NEVER_DIE_CACHE_THREADS[entry.cache_key] = thread
                    continue

                if entry.loop.is_closed():
                    # No exception is active here, so no exc_info.
                    logger.debug(
                        "Loop is closed, skipping future creation",
                        extra={"function": entry.function.__qualname__},
                    )
                    continue

                # Create the coroutine outside the try so it is always bound
                # when the except clause needs to close it.
                coroutine = _run_async_function_and_cache(entry)
                try:
                    future = asyncio.run_coroutine_threadsafe(coroutine, entry.loop)
                except RuntimeError:
                    # The loop closed between the check above and submission.
                    coroutine.close()
                    logger.debug(
                        "Loop is closed, skipping future creation",
                        extra={"function": entry.function.__qualname__},
                        exc_info=True,
                    )
                    continue

                _NEVER_DIE_CACHE_FUTURES[entry.cache_key] = future
        except Exception:
            # Entry-level failures are handled inside the runners; this guards
            # the scan/bookkeeping itself so the refresher stays alive.
            logger.debug("Unexpected error in never_die refresh loop", exc_info=True)
        finally:
            time.sleep(_REFRESH_INTERVAL_SECONDS)
            _clear_dead_futures()
            _clear_dead_threads()
166
+
167
+
168
def _start_never_die_thread():
    """Start the background refresher thread; idempotent under the module lock."""
    global _NEVER_DIE_THREAD
    with _NEVER_DIE_LOCK:
        already_running = _NEVER_DIE_THREAD is not None and _NEVER_DIE_THREAD.is_alive()
        if already_running:
            return
        # Daemon thread so it never blocks interpreter shutdown.
        _NEVER_DIE_THREAD = threading.Thread(target=_refresh_never_die_caches, daemon=True)
        _NEVER_DIE_THREAD.start()
177
+
178
+
179
def register_never_die_function(
    function: Callable[..., Any],
    ttl: Number,
    args: tuple,
    kwargs: dict,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
):
    """Add *function* (with these exact arguments) to the never_die registry.

    Async functions capture the currently running event loop so the background
    refresher can resubmit coroutines onto it. Starts the refresher thread on
    first use; duplicate registrations (same cache key) are ignored.
    """
    entry = NeverDieCacheEntry(
        function=function,
        ttl=ttl,
        args=args,
        kwargs=kwargs,
        cache_key_func=cache_key_func,
        ignore_fields=ignore_fields,
        loop=asyncio.get_running_loop() if inspect.iscoroutinefunction(function) else None,
        config=config,
    )

    with _NEVER_DIE_LOCK:
        if entry not in _NEVER_DIE_REGISTRY:
            _NEVER_DIE_REGISTRY.append(entry)

    _start_never_die_thread()
207
+
208
+
209
def clear_never_die_registry():
    """
    Remove every never_die entry and forget all in-flight workers.

    Intended mainly for tests: it stops the background refresher from
    touching resources that have already been torn down.
    """
    with _NEVER_DIE_LOCK:
        _NEVER_DIE_REGISTRY.clear()
        _NEVER_DIE_CACHE_THREADS.clear()
        _NEVER_DIE_CACHE_FUTURES.clear()
@@ -0,0 +1,37 @@
1
import threading
from typing import Callable

from cachify.cache import base_cache
from cachify.storage.memory_storage import MemoryStorage
from cachify.types import CacheConfig, CacheKeyFunction, F, Number
from cachify.utils.locks import ASYNC_LOCKS, SYNC_LOCKS

# Lazily-started janitor thread that evicts expired in-memory entries.
_CACHE_CLEAR_THREAD: threading.Thread | None = None
_CACHE_CLEAR_LOCK: threading.Lock = threading.Lock()

# Shared configuration for the in-memory backend: process-wide storage plus
# per-cache-key sync/async locks.
_MEMORY_CONFIG = CacheConfig(
    storage=MemoryStorage,
    sync_lock=lambda cache_key: SYNC_LOCKS[cache_key],
    async_lock=lambda cache_key: ASYNC_LOCKS[cache_key],
)
17
+
18
+
19
def _start_cache_clear_thread():
    """Launch the expired-entry janitor once; avoids unbounded memory growth."""
    global _CACHE_CLEAR_THREAD
    with _CACHE_CLEAR_LOCK:
        if _CACHE_CLEAR_THREAD is not None and _CACHE_CLEAR_THREAD.is_alive():
            return
        # Daemon thread so it never blocks interpreter shutdown.
        _CACHE_CLEAR_THREAD = threading.Thread(target=MemoryStorage.clear_expired_cached_items, daemon=True)
        _CACHE_CLEAR_THREAD.start()
27
+
28
+
29
def cache(
    ttl: Number = 300,
    never_die: bool = False,
    cache_key_func: CacheKeyFunction | None = None,
    ignore_fields: tuple[str, ...] = (),
) -> Callable[[F], F]:
    """In-memory cache decorator. See `base_cache` for full documentation."""
    # Make sure the expired-entry janitor is running before handing out wrappers.
    _start_cache_clear_thread()
    return base_cache(ttl, never_die, cache_key_func, ignore_fields, _MEMORY_CONFIG)
@@ -0,0 +1,19 @@
1
"""Public exports of the Redis backend: configuration helpers and the lock manager."""

from cachify.redis.config import (
    DEFAULT_KEY_PREFIX,
    DEFAULT_LOCK_TIMEOUT,
    RedisConfig,
    get_redis_config,
    reset_redis_config,
    setup_redis_config,
)
from cachify.redis.lock import RedisLockManager

__all__ = [
    "DEFAULT_KEY_PREFIX",
    "DEFAULT_LOCK_TIMEOUT",
    "RedisConfig",
    "RedisLockManager",
    "get_redis_config",
    "reset_redis_config",
    "setup_redis_config",
]
@@ -0,0 +1,115 @@
1
from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, Literal, get_args, overload

if TYPE_CHECKING:
    # redis is only needed for type checking; it stays an optional runtime dependency.
    from redis import Redis
    from redis.asyncio import Redis as AsyncRedis

# How Redis errors are handled: swallowed as cache misses, or propagated.
OnErrorType = Literal["silent", "raise"]

DEFAULT_KEY_PREFIX = "cachify"
DEFAULT_LOCK_TIMEOUT = 10
14
+
15
+
16
@dataclass
class RedisConfig:
    """Configuration for the Redis cache backend.

    Attributes:
        sync_client: redis.Redis instance used by sync functions (optional)
        async_client: redis.asyncio.Redis instance used by async functions (optional)
        key_prefix: Prefix applied to every cache key stored in Redis
        lock_timeout: Timeout in seconds for distributed locks
        on_error: "silent" treats Redis errors as cache misses; "raise" propagates
    """

    sync_client: Redis | None
    async_client: AsyncRedis | None
    key_prefix: str
    lock_timeout: int
    on_error: OnErrorType

    @overload
    def get_client(self, is_async: Literal[True]) -> AsyncRedis: ...

    @overload
    def get_client(self, is_async: Literal[False]) -> Redis: ...

    def get_client(self, is_async: bool) -> Redis | AsyncRedis:
        """Get the appropriate Redis client based on is_async flag.

        Raises:
            RuntimeError: If the requested client flavour was never configured.
        """
        client = self.async_client if is_async else self.sync_client

        if not client:
            mode = "async" if is_async else "sync"
            # Fix: the two fragments previously joined with no separating space,
            # producing "...not configured.Provide...".
            raise RuntimeError(
                f"Redis {mode} client not configured. "
                f"Provide {mode}_client in setup_redis_config() to use @redis_cache on {mode} functions."
            )
        return client
46
+
47
+
48
# Module-level singleton holding the active Redis configuration.
_redis_config: RedisConfig | None = None


def setup_redis_config(
    sync_client: Redis | None = None,
    async_client: AsyncRedis | None = None,
    key_prefix: str = DEFAULT_KEY_PREFIX,
    lock_timeout: int = DEFAULT_LOCK_TIMEOUT,
    on_error: OnErrorType = "silent",
):
    """
    Configure the Redis cache backend.

    Must be called before using @redis_cache decorator.
    Can only be called once. Use reset_redis_config() first if reconfiguration is needed.

    Args:
        sync_client: Redis sync client instance (redis.Redis)
        async_client: Redis async client instance (redis.asyncio.Redis)
        key_prefix: Prefix for all cache keys in Redis (default: "cachify")
        lock_timeout: Timeout in seconds for distributed locks (default: 10)
        on_error: Error handling mode - "silent" treats errors as cache miss,
                  "raise" propagates exceptions (default: "silent")

    Raises:
        ValueError: If neither sync_client nor async_client is provided,
            or if on_error is not a recognized mode
        RuntimeError: If called twice without reset_redis_config() first
    """
    global _redis_config

    if _redis_config is not None:
        raise RuntimeError("Redis config already set. Call reset_redis_config() first if you need to reconfigure.")

    if sync_client is None and async_client is None:
        raise ValueError("At least one of sync_client or async_client must be provided")

    if on_error not in get_args(OnErrorType):
        raise ValueError(f"on_error must be one of {get_args(OnErrorType)}")

    _redis_config = RedisConfig(
        sync_client=sync_client,
        async_client=async_client,
        key_prefix=key_prefix,
        lock_timeout=lock_timeout,
        on_error=on_error,
    )
94
+
95
+
96
def get_redis_config() -> RedisConfig:
    """Return the active Redis configuration.

    Raises:
        RuntimeError: If setup_redis_config() has not been called
    """
    if _redis_config is None:
        raise RuntimeError("Redis not configured. Call setup_redis_config() before using @redis_cache")
    return _redis_config
106
+
107
+
108
def reset_redis_config():
    """Discard the stored Redis configuration so setup_redis_config() may run again.

    Handy in tests or when connection settings must change at runtime.
    """
    global _redis_config
    _redis_config = None