cachify 0.1.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachify/redis/lock.py CHANGED
@@ -1,232 +1,232 @@
1
- import asyncio
2
- import contextlib
3
- import threading
4
- import time
5
- from contextlib import asynccontextmanager, contextmanager
6
- from dataclasses import dataclass, field
7
- from typing import AsyncIterator, Iterator, Literal, overload
8
-
9
- from redis.lock import Lock
10
- from redis.asyncio.lock import Lock as AsyncLock
11
-
12
- from cachify.config import logger
13
- from cachify.redis.config import get_redis_config
14
-
15
- HEARTBEAT_INTERVAL = 1
16
-
17
-
18
- @dataclass
19
- class _ActiveLockBase:
20
- """Base class for active lock tracking with shared logic."""
21
-
22
- timeout: float
23
- last_extended_at: float = field(default_factory=time.monotonic)
24
-
25
- def should_extend(self) -> bool:
26
- elapsed = time.monotonic() - self.last_extended_at
27
- return elapsed >= self.timeout / 2
28
-
29
- def mark_extended(self):
30
- self.last_extended_at = time.monotonic()
31
-
32
-
33
- @dataclass
34
- class _ActiveAsyncLock(_ActiveLockBase):
35
- """Tracks an async lock that needs heartbeat extension."""
36
-
37
- lock: AsyncLock = field(kw_only=True)
38
-
39
- async def extend(self) -> bool:
40
- try:
41
- await self.lock.extend(self.timeout)
42
- self.mark_extended()
43
- return True
44
- except Exception:
45
- return False
46
-
47
-
48
- @dataclass
49
- class _ActiveSyncLock(_ActiveLockBase):
50
- """Tracks a sync lock that needs heartbeat extension."""
51
-
52
- lock: Lock = field(kw_only=True)
53
-
54
- def extend(self) -> bool:
55
- try:
56
- self.lock.extend(self.timeout)
57
- self.mark_extended()
58
- return True
59
- except Exception:
60
- return False
61
-
62
-
63
- class _AsyncHeartbeatManager:
64
- """Manages heartbeat extensions for all async Redis locks."""
65
-
66
- _locks: dict[str, _ActiveAsyncLock] = {}
67
- _task: asyncio.Task | None = None
68
-
69
- @classmethod
70
- def register(cls, key: str, lock: AsyncLock, timeout: float):
71
- cls._locks[key] = _ActiveAsyncLock(timeout=timeout, lock=lock)
72
- cls._ensure_worker_running()
73
-
74
- @classmethod
75
- def unregister(cls, key: str):
76
- cls._locks.pop(key, None)
77
-
78
- @classmethod
79
- def reset(cls):
80
- """Cancel worker and clear state. Used for testing cleanup."""
81
- cls._locks.clear()
82
- if cls._task is not None and not cls._task.done():
83
- with contextlib.suppress(RuntimeError):
84
- cls._task.cancel()
85
- cls._task = None
86
-
87
- @classmethod
88
- def _ensure_worker_running(cls):
89
- if cls._task is None or cls._task.done():
90
- cls._task = asyncio.create_task(cls._worker())
91
-
92
- @classmethod
93
- async def _worker(cls):
94
- while True:
95
- await asyncio.sleep(HEARTBEAT_INTERVAL)
96
-
97
- if not cls._locks:
98
- cls._task = None
99
- return
100
-
101
- for key, active in list(cls._locks.items()):
102
- if not active.should_extend():
103
- continue
104
- if not await active.extend():
105
- logger.warning(f"Failed to extend lock, it may have expired", extra={"lock_key": key})
106
-
107
-
108
- class _SyncHeartbeatManager:
109
- """Manages heartbeat extensions for all sync Redis locks."""
110
-
111
- _locks: dict[str, _ActiveSyncLock] = {}
112
- _thread: threading.Thread | None = None
113
- _state_lock: threading.Lock = threading.Lock()
114
-
115
- @classmethod
116
- def register(cls, key: str, lock: Lock, timeout: float):
117
- with cls._state_lock:
118
- cls._locks[key] = _ActiveSyncLock(timeout=timeout, lock=lock)
119
- cls._ensure_worker_running()
120
-
121
- @classmethod
122
- def unregister(cls, key: str):
123
- cls._locks.pop(key, None)
124
-
125
- @classmethod
126
- def reset(cls):
127
- """Clear state. Used for testing cleanup. Thread exits on next iteration when _locks is empty."""
128
- cls._locks.clear()
129
- cls._thread = None
130
-
131
- @classmethod
132
- def _ensure_worker_running(cls):
133
- if cls._thread is None or not cls._thread.is_alive():
134
- cls._thread = threading.Thread(target=cls._worker, daemon=True)
135
- cls._thread.start()
136
-
137
- @classmethod
138
- def _worker(cls):
139
- while True:
140
- time.sleep(HEARTBEAT_INTERVAL)
141
-
142
- with cls._state_lock:
143
- if not cls._locks:
144
- cls._thread = None
145
- return
146
- locks_snapshot = list(cls._locks.items())
147
-
148
- for key, active in locks_snapshot:
149
- if not active.should_extend():
150
- continue
151
- if not active.extend():
152
- logger.warning(f"Failed to extend lock, it may have expired", extra={"lock_key": key})
153
-
154
-
155
- class RedisLockManager:
156
- """Distributed lock manager using Redis locks."""
157
-
158
- @classmethod
159
- def _make_lock_key(cls, cache_key: str) -> str:
160
- """Create a Redis lock key."""
161
- config = get_redis_config()
162
- return f"{config.key_prefix}:lock:{cache_key}"
163
-
164
- @overload
165
- @classmethod
166
- def _get_lock(cls, cache_key: str, is_async: Literal[True]) -> AsyncLock: ...
167
-
168
- @overload
169
- @classmethod
170
- def _get_lock(cls, cache_key: str, is_async: Literal[False]) -> Lock: ...
171
-
172
- @classmethod
173
- def _get_lock(cls, cache_key: str, is_async: bool) -> Lock | AsyncLock:
174
- """Get client and create lock."""
175
- config = get_redis_config()
176
- client = config.get_client(is_async)
177
- lock_key = cls._make_lock_key(cache_key)
178
- return client.lock(
179
- lock_key,
180
- timeout=config.lock_timeout,
181
- blocking=True,
182
- blocking_timeout=None,
183
- thread_local=False, # Required for heartbeat extension from background thread
184
- )
185
-
186
- @classmethod
187
- @contextmanager
188
- def sync_lock(cls, cache_key: str) -> Iterator[None]:
189
- """
190
- Acquire a distributed lock for sync operations.
191
-
192
- Uses Redis lock with blocking behavior - waits for lock holder to finish.
193
- Lock is automatically extended via heartbeat to prevent expiration during long operations.
194
- """
195
- config = get_redis_config()
196
- lock = cls._get_lock(cache_key, is_async=False)
197
- acquired = False
198
-
199
- try:
200
- acquired = lock.acquire()
201
- if acquired:
202
- _SyncHeartbeatManager.register(lock.name, lock, config.lock_timeout)
203
- yield
204
- finally:
205
- if acquired:
206
- _SyncHeartbeatManager.unregister(lock.name)
207
- with contextlib.suppress(Exception):
208
- lock.release()
209
-
210
- @classmethod
211
- @asynccontextmanager
212
- async def async_lock(cls, cache_key: str) -> AsyncIterator[None]:
213
- """
214
- Acquire a distributed lock for async operations.
215
-
216
- Uses Redis lock with blocking behavior - waits for lock holder to finish.
217
- Lock is automatically extended via heartbeat to prevent expiration during long operations.
218
- """
219
- config = get_redis_config()
220
- lock = cls._get_lock(cache_key, is_async=True)
221
- acquired = False
222
-
223
- try:
224
- acquired = await lock.acquire()
225
- if acquired:
226
- _AsyncHeartbeatManager.register(lock.name, lock, config.lock_timeout) # type: ignore
227
- yield
228
- finally:
229
- if acquired:
230
- _AsyncHeartbeatManager.unregister(lock.name) # type: ignore
231
- with contextlib.suppress(Exception):
232
- await lock.release()
1
+ import asyncio
2
+ import contextlib
3
+ import threading
4
+ import time
5
+ from contextlib import asynccontextmanager, contextmanager
6
+ from dataclasses import dataclass, field
7
+ from typing import AsyncIterator, Iterator, Literal, overload
8
+
9
+ from redis.lock import Lock
10
+ from redis.asyncio.lock import Lock as AsyncLock
11
+
12
+ from cachify.config import logger
13
+ from cachify.redis.config import get_redis_config
14
+
15
+ HEARTBEAT_INTERVAL = 1
16
+
17
+
18
@dataclass
class _ActiveLockBase:
    """Common bookkeeping for a lock kept alive by a heartbeat worker."""

    # Lock TTL in seconds; a refresh becomes due once half of it has elapsed.
    timeout: float
    # Monotonic timestamp of the most recent successful extension.
    last_extended_at: float = field(default_factory=time.monotonic)

    def should_extend(self) -> bool:
        """Return True once at least half the TTL has passed since the last refresh."""
        since_refresh = time.monotonic() - self.last_extended_at
        return since_refresh >= self.timeout / 2

    def mark_extended(self):
        """Record that the lock's TTL was just refreshed."""
        self.last_extended_at = time.monotonic()
31
+
32
+
33
@dataclass
class _ActiveAsyncLock(_ActiveLockBase):
    """Heartbeat bookkeeping for an asyncio Redis lock."""

    lock: AsyncLock = field(kw_only=True)

    async def extend(self) -> bool:
        """Refresh the lock's TTL; return False when the extension fails."""
        try:
            await self.lock.extend(self.timeout)
            self.mark_extended()
        except Exception:
            # The lock may already have expired, or Redis may be unreachable.
            return False
        return True
46
+
47
+
48
@dataclass
class _ActiveSyncLock(_ActiveLockBase):
    """Heartbeat bookkeeping for a synchronous Redis lock."""

    lock: Lock = field(kw_only=True)

    def extend(self) -> bool:
        """Refresh the lock's TTL; return False when the extension fails."""
        try:
            self.lock.extend(self.timeout)
            self.mark_extended()
        except Exception:
            # The lock may already have expired, or Redis may be unreachable.
            return False
        return True
61
+
62
+
63
class _AsyncHeartbeatManager:
    """Manages heartbeat extensions for all async Redis locks.

    A single asyncio task periodically extends every registered lock so long
    critical sections do not lose their lock to TTL expiry. The worker task
    exits on its own once no locks remain registered.
    """

    # key -> active-lock record; all access happens on the event loop thread.
    _locks: dict[str, _ActiveAsyncLock] = {}
    _task: asyncio.Task | None = None

    @classmethod
    def register(cls, key: str, lock: AsyncLock, timeout: float):
        """Start extending *lock* (tracked under *key*) before its TTL lapses."""
        cls._locks[key] = _ActiveAsyncLock(timeout=timeout, lock=lock)
        cls._ensure_worker_running()

    @classmethod
    def unregister(cls, key: str):
        """Stop extending the lock registered under *key* (no-op if unknown)."""
        cls._locks.pop(key, None)

    @classmethod
    def reset(cls):
        """Cancel worker and clear state. Used for testing cleanup."""
        cls._locks.clear()
        if cls._task is not None and not cls._task.done():
            # cancel() can raise RuntimeError when the event loop is gone.
            with contextlib.suppress(RuntimeError):
                cls._task.cancel()
        cls._task = None

    @classmethod
    def _ensure_worker_running(cls):
        # No locking needed: callers all run on the same event loop thread.
        if cls._task is None or cls._task.done():
            cls._task = asyncio.create_task(cls._worker())

    @classmethod
    async def _worker(cls):
        """Background loop: extend every registered lock whose refresh is due."""
        while True:
            await asyncio.sleep(HEARTBEAT_INTERVAL)

            if not cls._locks:
                # Nothing left to keep alive; finish so a later register()
                # starts a fresh task.
                cls._task = None
                return

            # Snapshot so (un)register during the awaits cannot break iteration.
            for key, active in list(cls._locks.items()):
                if not active.should_extend():
                    continue
                if not await active.extend():
                    # Fixed F541: this was an f-string with no placeholders.
                    logger.warning("Failed to extend lock, it may have expired", extra={"lock_key": key})
106
+
107
+
108
class _SyncHeartbeatManager:
    """Manages heartbeat extensions for all sync Redis locks.

    A single daemon thread periodically extends every registered lock so long
    critical sections do not lose their lock to TTL expiry. The thread exits
    on its own once no locks remain registered.
    """

    _locks: dict[str, _ActiveSyncLock] = {}
    _thread: threading.Thread | None = None
    # Guards _locks and _thread against concurrent access from user threads
    # and the worker thread.
    _state_lock: threading.Lock = threading.Lock()

    @classmethod
    def register(cls, key: str, lock: Lock, timeout: float):
        """Start extending *lock* (tracked under *key*) before its TTL lapses."""
        with cls._state_lock:
            cls._locks[key] = _ActiveSyncLock(timeout=timeout, lock=lock)
            cls._ensure_worker_running()

    @classmethod
    def unregister(cls, key: str):
        """Stop extending the lock registered under *key* (no-op if unknown)."""
        # Hold _state_lock for consistency with register()/_worker(); an
        # unguarded pop could race with the worker's empty-check/exit logic.
        with cls._state_lock:
            cls._locks.pop(key, None)

    @classmethod
    def reset(cls):
        """Clear state. Used for testing cleanup. Thread exits on next iteration when _locks is empty."""
        with cls._state_lock:
            cls._locks.clear()
            cls._thread = None

    @classmethod
    def _ensure_worker_running(cls):
        # Callers must hold _state_lock.
        if cls._thread is None or not cls._thread.is_alive():
            cls._thread = threading.Thread(target=cls._worker, daemon=True)
            cls._thread.start()

    @classmethod
    def _worker(cls):
        """Background loop: extend every registered lock whose refresh is due."""
        while True:
            time.sleep(HEARTBEAT_INTERVAL)

            with cls._state_lock:
                if not cls._locks:
                    # Nothing left to keep alive; clear the thread slot so a
                    # later register() spawns a fresh worker, then exit.
                    cls._thread = None
                    return
                # Snapshot under the lock; extend outside it so slow Redis
                # round-trips never block register()/unregister().
                locks_snapshot = list(cls._locks.items())

            for key, active in locks_snapshot:
                if not active.should_extend():
                    continue
                if not active.extend():
                    # Fixed F541: this was an f-string with no placeholders.
                    logger.warning("Failed to extend lock, it may have expired", extra={"lock_key": key})
153
+
154
+
155
class RedisLockManager:
    """Distributed lock manager using Redis locks.

    Exposes matching sync and async context managers that acquire a blocking
    Redis lock for a cache key and keep it alive through the heartbeat
    managers for the duration of the caller's critical section.
    """

    @classmethod
    def _make_lock_key(cls, cache_key: str) -> str:
        """Create a Redis lock key."""
        config = get_redis_config()
        # Namespaced under the configured prefix so lock keys never collide
        # with cached values or other tenants of the same Redis instance.
        return f"{config.key_prefix}:lock:{cache_key}"

    @overload
    @classmethod
    def _get_lock(cls, cache_key: str, is_async: Literal[True]) -> AsyncLock: ...

    @overload
    @classmethod
    def _get_lock(cls, cache_key: str, is_async: Literal[False]) -> Lock: ...

    @classmethod
    def _get_lock(cls, cache_key: str, is_async: bool) -> Lock | AsyncLock:
        """Get client and create lock."""
        config = get_redis_config()
        client = config.get_client(is_async)
        lock_key = cls._make_lock_key(cache_key)
        return client.lock(
            lock_key,
            timeout=config.lock_timeout,
            blocking=True,
            blocking_timeout=None,  # wait indefinitely for the current holder
            thread_local=False,  # Required for heartbeat extension from background thread
        )

    @classmethod
    @contextmanager
    def sync_lock(cls, cache_key: str) -> Iterator[None]:
        """
        Acquire a distributed lock for sync operations.

        Uses Redis lock with blocking behavior - waits for lock holder to finish.
        Lock is automatically extended via heartbeat to prevent expiration during long operations.
        """
        config = get_redis_config()
        lock = cls._get_lock(cache_key, is_async=False)
        acquired = False

        try:
            acquired = lock.acquire()
            if acquired:
                # Heartbeat keeps the lock alive while the caller's body runs.
                _SyncHeartbeatManager.register(lock.name, lock, config.lock_timeout)
            yield
        finally:
            if acquired:
                # Stop the heartbeat before releasing, then release
                # best-effort: the lock may already have expired server-side.
                _SyncHeartbeatManager.unregister(lock.name)
                with contextlib.suppress(Exception):
                    lock.release()

    @classmethod
    @asynccontextmanager
    async def async_lock(cls, cache_key: str) -> AsyncIterator[None]:
        """
        Acquire a distributed lock for async operations.

        Uses Redis lock with blocking behavior - waits for lock holder to finish.
        Lock is automatically extended via heartbeat to prevent expiration during long operations.
        """
        config = get_redis_config()
        lock = cls._get_lock(cache_key, is_async=True)
        acquired = False

        try:
            acquired = await lock.acquire()
            if acquired:
                # Heartbeat keeps the lock alive while the caller's body runs.
                _AsyncHeartbeatManager.register(lock.name, lock, config.lock_timeout)  # type: ignore
            yield
        finally:
            if acquired:
                # Stop the heartbeat before releasing, then release
                # best-effort: the lock may already have expired server-side.
                _AsyncHeartbeatManager.unregister(lock.name)  # type: ignore
                with contextlib.suppress(Exception):
                    await lock.release()
cachify/redis_cache.py CHANGED
@@ -1,27 +1,27 @@
1
- from typing import Callable
2
-
3
- from cachify.cache import base_cache
4
- from cachify.redis.lock import RedisLockManager
5
- from cachify.storage.redis_storage import RedisStorage
6
- from cachify.types import CacheConfig, CacheKeyFunction, F, Number
7
-
8
- _REDIS_CONFIG = CacheConfig(
9
- storage=RedisStorage,
10
- sync_lock=RedisLockManager.sync_lock,
11
- async_lock=RedisLockManager.async_lock,
12
- )
13
-
14
-
15
- def redis_cache(
16
- ttl: Number = 300,
17
- never_die: bool = False,
18
- cache_key_func: CacheKeyFunction | None = None,
19
- ignore_fields: tuple[str, ...] = (),
20
- ) -> Callable[[F], F]:
21
- """
22
- Redis cache decorator. See `base_cache` for full documentation.
23
-
24
- Requires setup_redis_config() to be called before use.
25
- Uses Redis for distributed caching across multiple processes/machines.
26
- """
27
- return base_cache(ttl, never_die, cache_key_func, ignore_fields, _REDIS_CONFIG)
1
+ from typing import Callable
2
+
3
+ from cachify.cache import base_cache
4
+ from cachify.redis.lock import RedisLockManager
5
+ from cachify.storage.redis_storage import RedisStorage
6
+ from cachify.types import CacheConfig, CacheKeyFunction, F, Number
7
+
8
# Cache wiring shared by every redis_cache() decorator: Redis-backed storage
# guarded by distributed Redis locks on both the sync and async call paths.
_REDIS_CONFIG = CacheConfig(
    storage=RedisStorage,
    sync_lock=RedisLockManager.sync_lock,
    async_lock=RedisLockManager.async_lock,
)
13
+
14
+
15
def redis_cache(
    ttl: Number = 300,
    never_die: bool = False,
    cache_key_func: CacheKeyFunction | None = None,
    ignore_fields: tuple[str, ...] = (),
) -> Callable[[F], F]:
    """
    Decorator that caches results in Redis. See `base_cache` for full documentation.

    setup_redis_config() must be called before any decorated function runs.
    Backed by Redis, so cached values are shared across processes and machines.
    """
    decorator = base_cache(ttl, never_die, cache_key_func, ignore_fields, _REDIS_CONFIG)
    return decorator
@@ -1,9 +1,9 @@
1
- from cachify.storage.memory_storage import MemoryCacheEntry, MemoryStorage
2
- from cachify.storage.redis_storage import RedisCacheEntry, RedisStorage
3
-
4
- __all__ = [
5
- "MemoryCacheEntry",
6
- "MemoryStorage",
7
- "RedisCacheEntry",
8
- "RedisStorage",
9
- ]
1
+ from cachify.storage.memory_storage import MemoryCacheEntry, MemoryStorage
2
+ from cachify.storage.redis_storage import RedisCacheEntry, RedisStorage
3
+
4
+ __all__ = [
5
+ "MemoryCacheEntry",
6
+ "MemoryStorage",
7
+ "RedisCacheEntry",
8
+ "RedisStorage",
9
+ ]
@@ -1,52 +1,52 @@
1
- import contextlib
2
- import time
3
- from typing import Any
4
-
5
- from cachify.types import CacheEntry, Number
6
-
7
- _CACHE_CLEAR_INTERVAL_SECONDS: int = 10
8
-
9
-
10
- class MemoryCacheEntry(CacheEntry): ...
11
-
12
-
13
- class MemoryStorage:
14
- """In-memory cache storage implementing CacheStorage protocol."""
15
-
16
- _CACHE: dict[str, MemoryCacheEntry] = {}
17
-
18
- @classmethod
19
- def clear_expired_cached_items(cls):
20
- """Clear expired cached items from the cache."""
21
- while True:
22
- with contextlib.suppress(Exception):
23
- for key, entry in list(cls._CACHE.items()):
24
- if entry.is_expired():
25
- del cls._CACHE[key]
26
-
27
- time.sleep(_CACHE_CLEAR_INTERVAL_SECONDS)
28
-
29
- @classmethod
30
- def set(cls, cache_key: str, result: Any, ttl: Number | None):
31
- cls._CACHE[cache_key] = MemoryCacheEntry(result, ttl)
32
-
33
- @classmethod
34
- def get(cls, cache_key: str, skip_cache: bool) -> MemoryCacheEntry | None:
35
- if skip_cache:
36
- return None
37
- if entry := cls._CACHE.get(cache_key):
38
- if not entry.is_expired():
39
- return entry
40
- return None
41
-
42
- @classmethod
43
- async def aset(cls, cache_key: str, result: Any, ttl: Number | None):
44
- cls.set(cache_key, result, ttl)
45
-
46
- @classmethod
47
- async def aget(cls, cache_key: str, skip_cache: bool) -> MemoryCacheEntry | None:
48
- return cls.get(cache_key, skip_cache)
49
-
50
- @classmethod
51
- def clear(cls):
52
- cls._CACHE.clear()
1
+ import contextlib
2
+ import time
3
+ from typing import Any
4
+
5
+ from cachify.types import CacheEntry, Number
6
+
7
+ _CACHE_CLEAR_INTERVAL_SECONDS: int = 10
8
+
9
+
10
# Marker subclass: in-memory entries need no behavior beyond the base CacheEntry.
class MemoryCacheEntry(CacheEntry): ...
11
+
12
+
13
class MemoryStorage:
    """In-memory cache storage implementing CacheStorage protocol."""

    # Process-wide store shared by every decorated function.
    _CACHE: dict[str, MemoryCacheEntry] = {}

    @classmethod
    def clear_expired_cached_items(cls):
        """Clear expired cached items from the cache.

        Runs forever; intended to be driven from a background thread.
        """
        while True:
            # Best-effort sweep: never let a transient error kill the loop.
            with contextlib.suppress(Exception):
                for key, entry in list(cls._CACHE.items()):
                    if entry.is_expired():
                        # pop() instead of del: the key may have been removed
                        # concurrently (e.g. via clear()), and a KeyError here
                        # would abort the remainder of this sweep pass.
                        cls._CACHE.pop(key, None)

            time.sleep(_CACHE_CLEAR_INTERVAL_SECONDS)

    @classmethod
    def set(cls, cache_key: str, result: Any, ttl: Number | None):
        """Store *result* under *cache_key*; TTL semantics come from CacheEntry."""
        cls._CACHE[cache_key] = MemoryCacheEntry(result, ttl)

    @classmethod
    def get(cls, cache_key: str, skip_cache: bool) -> MemoryCacheEntry | None:
        """Return the non-expired entry for *cache_key*, or None on miss/skip."""
        if skip_cache:
            return None
        if entry := cls._CACHE.get(cache_key):
            if not entry.is_expired():
                return entry
        return None

    @classmethod
    async def aset(cls, cache_key: str, result: Any, ttl: Number | None):
        """Async facade over set(); the storage itself is synchronous."""
        cls.set(cache_key, result, ttl)

    @classmethod
    async def aget(cls, cache_key: str, skip_cache: bool) -> MemoryCacheEntry | None:
        """Async facade over get(); the storage itself is synchronous."""
        return cls.get(cache_key, skip_cache)

    @classmethod
    def clear(cls):
        """Drop every cached entry, expired or not."""
        cls._CACHE.clear()