cachify-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachify/redis/lock.py ADDED
@@ -0,0 +1,232 @@
+ import asyncio
+ import contextlib
+ import threading
+ import time
+ from contextlib import asynccontextmanager, contextmanager
+ from dataclasses import dataclass, field
+ from typing import AsyncIterator, Iterator, Literal, overload
+ 
+ from redis.lock import Lock
+ from redis.asyncio.lock import Lock as AsyncLock
+ 
+ from cachify.config import logger
+ from cachify.redis.config import get_redis_config
+ 
+ HEARTBEAT_INTERVAL = 1
+ 
+ 
+ @dataclass
+ class _ActiveLockBase:
+     """Base class for active lock tracking with shared logic."""
+ 
+     timeout: float
+     last_extended_at: float = field(default_factory=time.monotonic)
+ 
+     def should_extend(self) -> bool:
+         elapsed = time.monotonic() - self.last_extended_at
+         return elapsed >= self.timeout / 2
+ 
+     def mark_extended(self):
+         self.last_extended_at = time.monotonic()
+ 
+ 
+ @dataclass
+ class _ActiveAsyncLock(_ActiveLockBase):
+     """Tracks an async lock that needs heartbeat extension."""
+ 
+     lock: AsyncLock = field(kw_only=True)
+ 
+     async def extend(self) -> bool:
+         try:
+             await self.lock.extend(self.timeout)
+             self.mark_extended()
+             return True
+         except Exception:
+             return False
+ 
+ 
+ @dataclass
+ class _ActiveSyncLock(_ActiveLockBase):
+     """Tracks a sync lock that needs heartbeat extension."""
+ 
+     lock: Lock = field(kw_only=True)
+ 
+     def extend(self) -> bool:
+         try:
+             self.lock.extend(self.timeout)
+             self.mark_extended()
+             return True
+         except Exception:
+             return False
+ 
+ 
+ class _AsyncHeartbeatManager:
+     """Manages heartbeat extensions for all async Redis locks."""
+ 
+     _locks: dict[str, _ActiveAsyncLock] = {}
+     _task: asyncio.Task | None = None
+ 
+     @classmethod
+     def register(cls, key: str, lock: AsyncLock, timeout: float):
+         cls._locks[key] = _ActiveAsyncLock(timeout=timeout, lock=lock)
+         cls._ensure_worker_running()
+ 
+     @classmethod
+     def unregister(cls, key: str):
+         cls._locks.pop(key, None)
+ 
+     @classmethod
+     def reset(cls):
+         """Cancel worker and clear state. Used for testing cleanup."""
+         cls._locks.clear()
+         if cls._task is not None and not cls._task.done():
+             with contextlib.suppress(RuntimeError):
+                 cls._task.cancel()
+         cls._task = None
+ 
+     @classmethod
+     def _ensure_worker_running(cls):
+         if cls._task is None or cls._task.done():
+             cls._task = asyncio.create_task(cls._worker())
+ 
+     @classmethod
+     async def _worker(cls):
+         while True:
+             await asyncio.sleep(HEARTBEAT_INTERVAL)
+ 
+             if not cls._locks:
+                 cls._task = None
+                 return
+ 
+             for key, active in list(cls._locks.items()):
+                 if not active.should_extend():
+                     continue
+                 if not await active.extend():
+                     logger.warning("Failed to extend lock; it may have expired", extra={"lock_key": key})
+ 
+ 
+ class _SyncHeartbeatManager:
+     """Manages heartbeat extensions for all sync Redis locks."""
+ 
+     _locks: dict[str, _ActiveSyncLock] = {}
+     _thread: threading.Thread | None = None
+     _state_lock: threading.Lock = threading.Lock()
+ 
+     @classmethod
+     def register(cls, key: str, lock: Lock, timeout: float):
+         with cls._state_lock:
+             cls._locks[key] = _ActiveSyncLock(timeout=timeout, lock=lock)
+             cls._ensure_worker_running()
+ 
+     @classmethod
+     def unregister(cls, key: str):
+         cls._locks.pop(key, None)
+ 
+     @classmethod
+     def reset(cls):
+         """Clear state. Used for testing cleanup. Thread exits on next iteration when _locks is empty."""
+         cls._locks.clear()
+         cls._thread = None
+ 
+     @classmethod
+     def _ensure_worker_running(cls):
+         if cls._thread is None or not cls._thread.is_alive():
+             cls._thread = threading.Thread(target=cls._worker, daemon=True)
+             cls._thread.start()
+ 
+     @classmethod
+     def _worker(cls):
+         while True:
+             time.sleep(HEARTBEAT_INTERVAL)
+ 
+             with cls._state_lock:
+                 if not cls._locks:
+                     cls._thread = None
+                     return
+                 locks_snapshot = list(cls._locks.items())
+ 
+             for key, active in locks_snapshot:
+                 if not active.should_extend():
+                     continue
+                 if not active.extend():
+                     logger.warning("Failed to extend lock; it may have expired", extra={"lock_key": key})
+ 
+ 
+ class RedisLockManager:
+     """Distributed lock manager using Redis locks."""
+ 
+     @classmethod
+     def _make_lock_key(cls, cache_key: str) -> str:
+         """Create a Redis lock key."""
+         config = get_redis_config()
+         return f"{config.key_prefix}:lock:{cache_key}"
+ 
+     @overload
+     @classmethod
+     def _get_lock(cls, cache_key: str, is_async: Literal[True]) -> AsyncLock: ...
+ 
+     @overload
+     @classmethod
+     def _get_lock(cls, cache_key: str, is_async: Literal[False]) -> Lock: ...
+ 
+     @classmethod
+     def _get_lock(cls, cache_key: str, is_async: bool) -> Lock | AsyncLock:
+         """Get client and create lock."""
+         config = get_redis_config()
+         client = config.get_client(is_async)
+         lock_key = cls._make_lock_key(cache_key)
+         return client.lock(
+             lock_key,
+             timeout=config.lock_timeout,
+             blocking=True,
+             blocking_timeout=None,
+             thread_local=False,  # Required for heartbeat extension from background thread
+         )
+ 
+     @classmethod
+     @contextmanager
+     def sync_lock(cls, cache_key: str) -> Iterator[None]:
+         """
+         Acquire a distributed lock for sync operations.
+ 
+         Uses Redis lock with blocking behavior - waits for lock holder to finish.
+         Lock is automatically extended via heartbeat to prevent expiration during long operations.
+         """
+         config = get_redis_config()
+         lock = cls._get_lock(cache_key, is_async=False)
+         acquired = False
+ 
+         try:
+             acquired = lock.acquire()
+             if acquired:
+                 _SyncHeartbeatManager.register(lock.name, lock, config.lock_timeout)
+             yield
+         finally:
+             if acquired:
+                 _SyncHeartbeatManager.unregister(lock.name)
+                 with contextlib.suppress(Exception):
+                     lock.release()
+ 
+     @classmethod
+     @asynccontextmanager
+     async def async_lock(cls, cache_key: str) -> AsyncIterator[None]:
+         """
+         Acquire a distributed lock for async operations.
+ 
+         Uses Redis lock with blocking behavior - waits for lock holder to finish.
+         Lock is automatically extended via heartbeat to prevent expiration during long operations.
+         """
+         config = get_redis_config()
+         lock = cls._get_lock(cache_key, is_async=True)
+         acquired = False
+ 
+         try:
+             acquired = await lock.acquire()
+             if acquired:
+                 _AsyncHeartbeatManager.register(lock.name, lock, config.lock_timeout)  # type: ignore
+             yield
+         finally:
+             if acquired:
+                 _AsyncHeartbeatManager.unregister(lock.name)  # type: ignore
+                 with contextlib.suppress(Exception):
+                     await lock.release()
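
Both managers are plain context managers from the caller's perspective. A minimal usage sketch follows; it assumes `setup_redis_config()` (mentioned later in this diff but not shown) has already been called against a reachable Redis:

```python
# Sketch only: assumes setup_redis_config() (not in this diff) has already
# configured the client, key_prefix, and lock_timeout.
from cachify.redis.lock import RedisLockManager

def rebuild_report(cache_key: str) -> None:
    # Blocks until the current holder releases. Once acquired, the
    # heartbeat worker re-extends the lock each time half of lock_timeout
    # has elapsed, so long-running work does not lose the lock.
    with RedisLockManager.sync_lock(cache_key):
        ...  # expensive recomputation

async def rebuild_report_async(cache_key: str) -> None:
    async with RedisLockManager.async_lock(cache_key):
        ...  # same guarantees via the asyncio heartbeat task
```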
cachify/redis_cache.py ADDED
@@ -0,0 +1,27 @@
+ from typing import Callable
+ 
+ from cachify.cache import base_cache
+ from cachify.redis.lock import RedisLockManager
+ from cachify.storage.redis_storage import RedisStorage
+ from cachify.types import CacheConfig, CacheKeyFunction, F, Number
+ 
+ _REDIS_CONFIG = CacheConfig(
+     storage=RedisStorage,
+     sync_lock=RedisLockManager.sync_lock,
+     async_lock=RedisLockManager.async_lock,
+ )
+ 
+ 
+ def redis_cache(
+     ttl: Number = 300,
+     never_die: bool = False,
+     cache_key_func: CacheKeyFunction | None = None,
+     ignore_fields: tuple[str, ...] = (),
+ ) -> Callable[[F], F]:
+     """
+     Redis cache decorator. See `base_cache` for full documentation.
+ 
+     Requires setup_redis_config() to be called before use.
+     Uses Redis for distributed caching across multiple processes/machines.
+     """
+     return base_cache(ttl, never_die, cache_key_func, ignore_fields, _REDIS_CONFIG)
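
For reference, a sketch of the decorator in use. The call-time `skip_cache` flag is grounded in the `CacheKwargs` type later in this diff; `base_cache` itself is not part of this diff:

```python
from cachify.redis_cache import redis_cache

@redis_cache(ttl=60, ignore_fields=("request_id",))
def load_user(user_id: int, request_id: str) -> dict:
    # request_id is excluded from the cache key, so a retry with a fresh
    # request_id still hits the same cached entry.
    return {"id": user_id}

load_user(1, request_id="a")                   # computes, stores in Redis
load_user(1, request_id="b")                   # cache hit (request_id ignored)
load_user(1, request_id="c", skip_cache=True)  # bypasses the cache read
```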
cachify/storage/__init__.py ADDED
@@ -0,0 +1,9 @@
+ from cachify.storage.memory_storage import MemoryCacheEntry, MemoryStorage
+ from cachify.storage.redis_storage import RedisCacheEntry, RedisStorage
+ 
+ __all__ = [
+     "MemoryCacheEntry",
+     "MemoryStorage",
+     "RedisCacheEntry",
+     "RedisStorage",
+ ]
cachify/storage/memory_storage.py ADDED
@@ -0,0 +1,52 @@
+ import contextlib
+ import time
+ from typing import Any
+ 
+ from cachify.types import CacheEntry, Number
+ 
+ _CACHE_CLEAR_INTERVAL_SECONDS: int = 10
+ 
+ 
+ class MemoryCacheEntry(CacheEntry): ...
+ 
+ 
+ class MemoryStorage:
+     """In-memory cache storage implementing CacheStorage protocol."""
+ 
+     _CACHE: dict[str, MemoryCacheEntry] = {}
+ 
+     @classmethod
+     def clear_expired_cached_items(cls):
+         """Clear expired cached items from the cache."""
+         while True:
+             with contextlib.suppress(Exception):
+                 for key, entry in list(cls._CACHE.items()):
+                     if entry.is_expired():
+                         del cls._CACHE[key]
+ 
+             time.sleep(_CACHE_CLEAR_INTERVAL_SECONDS)
+ 
+     @classmethod
+     def set(cls, cache_key: str, result: Any, ttl: Number | None):
+         cls._CACHE[cache_key] = MemoryCacheEntry(result, ttl)
+ 
+     @classmethod
+     def get(cls, cache_key: str, skip_cache: bool) -> MemoryCacheEntry | None:
+         if skip_cache:
+             return None
+         if entry := cls._CACHE.get(cache_key):
+             if not entry.is_expired():
+                 return entry
+         return None
+ 
+     @classmethod
+     async def aset(cls, cache_key: str, result: Any, ttl: Number | None):
+         cls.set(cache_key, result, ttl)
+ 
+     @classmethod
+     async def aget(cls, cache_key: str, skip_cache: bool) -> MemoryCacheEntry | None:
+         return cls.get(cache_key, skip_cache)
+ 
+     @classmethod
+     def clear(cls):
+         cls._CACHE.clear()
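
`clear_expired_cached_items` is an infinite loop, so it is presumably run on a daemon thread by code elsewhere in the package. A sketch of that wiring and of the expiry semantics:

```python
import threading
import time
from cachify.storage.memory_storage import MemoryStorage

# The sweeper loop never returns, so it belongs on a daemon thread
# (the code that actually starts it is not part of this diff).
threading.Thread(target=MemoryStorage.clear_expired_cached_items, daemon=True).start()

MemoryStorage.set("answer", 42, ttl=0.1)
entry = MemoryStorage.get("answer", skip_cache=False)
assert entry is not None and entry.result == 42

time.sleep(0.2)
# Expired entries become invisible immediately via is_expired(); the sweep
# thread only reclaims the memory every _CACHE_CLEAR_INTERVAL_SECONDS.
assert MemoryStorage.get("answer", skip_cache=False) is None
```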
cachify/storage/redis_storage.py ADDED
@@ -0,0 +1,138 @@
+ import pickle
+ import time
+ from typing import Any, overload
+ 
+ from cachify.redis.config import get_redis_config
+ from cachify.config import logger
+ from cachify.types import CacheEntry, Number
+ 
+ 
+ class RedisCacheEntry(CacheEntry):
+     @classmethod
+     def time(cls) -> float:
+         return time.time()
+ 
+ 
+ class RedisStorage:
+     """Redis cache storage implementing CacheStorage protocol."""
+ 
+     @classmethod
+     def _make_key(cls, cache_key: str) -> str:
+         """Create a Redis key from cache_key."""
+         config = get_redis_config()
+         return f"{config.key_prefix}:{cache_key}"
+ 
+     @classmethod
+     def _serialize(cls, entry: RedisCacheEntry) -> bytes:
+         """Serialize a cache entry to bytes."""
+         try:
+             return pickle.dumps(entry, protocol=pickle.HIGHEST_PROTOCOL)
+         except (pickle.PicklingError, TypeError, AttributeError) as exc:
+             raise TypeError(
+                 f"Failed to serialize cache entry. Object of type {type(entry.result).__name__} "
+                 "cannot be pickled. Ensure the cached result is serializable."
+             ) from exc
+ 
+     @classmethod
+     def _deserialize(cls, data: bytes) -> RedisCacheEntry:
+         """Deserialize bytes to a cache entry."""
+         return pickle.loads(data)
+ 
+     @overload
+     @classmethod
+     def _prepare_set(cls, cache_key: str, result: Any, ttl: None) -> tuple[str, bytes, None]: ...
+ 
+     @overload
+     @classmethod
+     def _prepare_set(cls, cache_key: str, result: Any, ttl: Number) -> tuple[str, bytes, int]: ...
+ 
+     @classmethod
+     def _prepare_set(cls, cache_key: str, result: Any, ttl: Number | None) -> tuple[str, bytes, int | None]:
+         """Prepare key, data, and expiry in milliseconds for set operations."""
+         key = cls._make_key(cache_key)
+         data = cls._serialize(RedisCacheEntry(result, ttl))
+         if ttl is None:
+             return key, data, None
+ 
+         return key, data, int(ttl * 1000)
+ 
+     @classmethod
+     def _handle_error(cls, exc: Exception, operation: str, cache_key: str):
+         """Handle Redis errors based on config."""
+         config = get_redis_config()
+         if config.on_error == "raise":
+             raise
+ 
+         logger.debug(f"Redis operation error: {exc}", extra={"operation": operation, "cache_key": cache_key})
+ 
+     @classmethod
+     def _handle_get_result(cls, data: bytes | None) -> RedisCacheEntry | None:
+         """Process get result and return entry if valid."""
+         if data is None:
+             return None
+ 
+         entry = cls._deserialize(data)
+         if entry.is_expired():
+             return None
+ 
+         return entry
+ 
+     @classmethod
+     def set(cls, cache_key: str, result: Any, ttl: Number | None):
+         """Store a result in Redis cache."""
+         config = get_redis_config()
+         client = config.get_client(is_async=False)
+         key, data, expiry_ms = cls._prepare_set(cache_key, result, ttl)
+         try:
+             if expiry_ms is None:
+                 client.set(key, data)
+                 return
+ 
+             client.psetex(key, expiry_ms, data)
+         except Exception as exc:
+             cls._handle_error(exc, "set", cache_key)
+ 
+     @classmethod
+     def get(cls, cache_key: str, skip_cache: bool) -> RedisCacheEntry | None:
+         """Retrieve a cache entry from Redis."""
+         if skip_cache:
+             return None
+ 
+         config = get_redis_config()
+         client = config.get_client(is_async=False)
+         key = cls._make_key(cache_key)
+         try:
+             return cls._handle_get_result(client.get(key))  # type: ignore[arg-type]
+         except Exception as exc:
+             cls._handle_error(exc, "get", cache_key)
+             return None
+ 
+     @classmethod
+     async def aset(cls, cache_key: str, result: Any, ttl: Number | None):
+         """Store a result in Redis cache (async)."""
+         config = get_redis_config()
+         client = config.get_client(is_async=True)
+         key, data, expiry_ms = cls._prepare_set(cache_key, result, ttl)
+         try:
+             if expiry_ms is None:
+                 await client.set(key, data)
+                 return
+ 
+             await client.psetex(key, expiry_ms, data)
+         except Exception as exc:
+             cls._handle_error(exc, "aset", cache_key)
+ 
+     @classmethod
+     async def aget(cls, cache_key: str, skip_cache: bool) -> RedisCacheEntry | None:
+         """Retrieve a cache entry from Redis (async)."""
+         if skip_cache:
+             return None
+ 
+         config = get_redis_config()
+         client = config.get_client(is_async=True)
+         key = cls._make_key(cache_key)
+         try:
+             return cls._handle_get_result(await client.get(key))  # type: ignore[arg-type]
+         except Exception as exc:
+             cls._handle_error(exc, "aget", cache_key)
+             return None
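
A sketch of the serialization contract (assuming `setup_redis_config()` has been called): values round-trip through pickle, so an unpicklable result fails fast with the `TypeError` raised in `_serialize`, before anything is sent to Redis:

```python
from cachify.storage.redis_storage import RedisStorage

RedisStorage.set("report:1", {"rows": [1, 2, 3]}, ttl=30)  # plain data pickles fine

try:
    RedisStorage.set("report:2", lambda: None, ttl=30)  # lambdas cannot be pickled
except TypeError as exc:
    print(exc)  # "Failed to serialize cache entry. Object of type function ..."
```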
cachify/types.py ADDED
@@ -0,0 +1,95 @@
+ import time
+ from dataclasses import dataclass, field
+ from typing import Any, AsyncContextManager, Callable, ContextManager, Hashable, Protocol, TypeAlias, TypedDict, TypeVar
+ 
+ Number: TypeAlias = int | float
+ CacheKeyFunction: TypeAlias = Callable[[tuple, dict], Hashable]
+ 
+ F = TypeVar("F", bound=Callable[..., Any])
+ 
+ 
+ @dataclass
+ class CacheEntry:
+     """Base cache entry with TTL and expiration tracking."""
+ 
+     result: Any
+     ttl: float | None
+ 
+     cached_at: float = field(init=False)
+     expires_at: float = field(init=False)
+ 
+     @classmethod
+     def time(cls) -> float:
+         return time.monotonic()
+ 
+     def __post_init__(self):
+         self.cached_at = self.time()
+         self.expires_at = 0 if self.ttl is None else self.cached_at + self.ttl
+ 
+     def is_expired(self) -> bool:
+         if self.ttl is None:
+             return False
+ 
+         return self.time() > self.expires_at
+ 
+ 
+ @dataclass(frozen=True, slots=True)
+ class CacheConfig:
+     """Configuration for cache, grouping storage, lock, and never_die registration."""
+ 
+     storage: "CacheStorage"
+     sync_lock: Callable[[str], ContextManager]
+     async_lock: Callable[[str], AsyncContextManager]
+ 
+ 
+ class CacheEntryProtocol(Protocol):
+     """Protocol for cache entry objects."""
+ 
+     result: Any
+ 
+     def is_expired(self) -> bool: ...
+ 
+ 
+ class CacheStorage(Protocol):
+     """Protocol defining the interface for cache storage."""
+ 
+     def get(self, cache_key: str, skip_cache: bool) -> CacheEntryProtocol | None:
+         """Retrieve a cache entry. Returns None if not found, expired, or skip_cache is True."""
+         ...
+ 
+     def set(self, cache_key: str, result: Any, ttl: Number | None):
+         """Store a result in the cache with optional TTL."""
+         ...
+ 
+     async def aget(self, cache_key: str, skip_cache: bool) -> CacheEntryProtocol | None:
+         """Async version of get."""
+         ...
+ 
+     async def aset(self, cache_key: str, result: Any, ttl: Number | None):
+         """Async version of set."""
+         ...
+ 
+ 
+ class CacheKwargs(TypedDict, total=False):
+     """
+     ### Description
+     This type can be used in conjunction with `Unpack` to provide static type
+     checking for the parameters added by the `@cache()` decorator.
+ 
+     This type is completely optional and `skip_cache` will work regardless
+     of what static type checkers complain about.
+ 
+     ### Example
+     ```
+     @cache()
+     def function_with_cache(**_: Unpack[CacheKwargs]): ...
+ 
+     # pylance/pyright should not complain
+     function_with_cache(skip_cache=True)
+     ```
+ 
+     ### Notes
+     Prior to Python 3.11, `Unpack` is only available with typing_extensions
+     """
+ 
+     skip_cache: bool
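
A note on the two clocks: `CacheEntry` stamps expiry with `time.monotonic()`, which is only comparable within one process; that is presumably why `RedisCacheEntry` above overrides `time()` with wall-clock `time.time()` for entries that are deserialized in other processes. A small sketch of the bookkeeping:

```python
from cachify.types import CacheEntry

entry = CacheEntry(result="value", ttl=0.5)
assert not entry.is_expired()          # expires_at = cached_at + 0.5

forever = CacheEntry(result="value", ttl=None)
assert not forever.is_expired()        # ttl=None never expires

entry.expires_at = entry.time() - 1    # force expiry for the sketch
assert entry.is_expired()
```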
@@ -0,0 +1,65 @@
+ import hashlib
+ import inspect
+ import pickle
+ from collections.abc import Callable, Generator
+ from inspect import Signature
+ from typing import Any
+ 
+ from cachify.types import CacheKeyFunction
+ from cachify.utils.functions import get_function_id
+ 
+ 
+ def _cache_key_fingerprint(value: object) -> str:
+     payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
+     return hashlib.blake2b(payload, digest_size=16).hexdigest()
+ 
+ 
+ def _iter_arguments(
+     function_signature: Signature,
+     args: tuple,
+     kwargs: dict,
+     ignore_fields: tuple[str, ...],
+ ) -> Generator[Any, None, None]:
+     bound = function_signature.bind_partial(*args, **kwargs)
+     bound.apply_defaults()
+ 
+     for name, value in bound.arguments.items():
+         if name in ignore_fields:
+             continue
+ 
+         param = function_signature.parameters[name]
+ 
+         # Positional variable arguments can just be yielded like so
+         if param.kind == param.VAR_POSITIONAL:
+             yield from value
+             continue
+ 
+         # Keyword variable arguments need to be unpacked from .items()
+         if param.kind == param.VAR_KEYWORD:
+             yield from value.items()
+             continue
+ 
+         yield name, value
+ 
+ 
+ def create_cache_key(
+     function: Callable[..., Any],
+     cache_key_func: CacheKeyFunction | None,
+     ignore_fields: tuple[str, ...],
+     args: tuple,
+     kwargs: dict,
+ ) -> str:
+     function_id = get_function_id(function)
+ 
+     if not cache_key_func:
+         function_signature = inspect.signature(function)
+         items = tuple(_iter_arguments(function_signature, args, kwargs, ignore_fields))
+         return f"{function_id}:{_cache_key_fingerprint(items)}"
+ 
+     cache_key = cache_key_func(args, kwargs)
+     try:
+         return f"{function_id}:{_cache_key_fingerprint(cache_key)}"
+     except TypeError as exc:
+         raise ValueError(
+             "Cache key function must return a hashable cache key - be careful with mutable types (list, dict, set) and non built-in types"
+         ) from exc
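
The default key, then, has the shape `"<module>.<qualname>:<hex digest>"`, where the digest is a 16-byte blake2b over the pickled, bound arguments. A self-contained sketch of that fingerprint step:

```python
import hashlib
import pickle

def fingerprint(value: object) -> str:
    # Mirrors _cache_key_fingerprint: pickle, then a 16-byte blake2b digest.
    payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
    return hashlib.blake2b(payload, digest_size=16).hexdigest()

# Binding normalizes the call, so identical bound arguments hash identically:
assert fingerprint((("a", 1), ("b", 2))) == fingerprint((("a", 1), ("b", 2)))
# ignore_fields drops an argument before hashing, which is how calls that
# differ only in an ignored field share one cache entry.
```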
@@ -0,0 +1,44 @@
+ import inspect
+ from typing import Callable
+ 
+ from cachify._async import async_decorator
+ from cachify._sync import sync_decorator
+ from cachify.types import CacheConfig, CacheKeyFunction, F, Number
+ 
+ 
+ def create_cache_decorator(
+     ttl: Number,
+     never_die: bool,
+     cache_key_func: CacheKeyFunction | None,
+     ignore_fields: tuple[str, ...],
+     config: CacheConfig,
+ ) -> Callable[[F], F]:
+     """
+     Create a cache decorator with the given configuration.
+ 
+     This is a shared factory used by both memory_cache and redis_cache
+     to avoid code duplication.
+     """
+     if cache_key_func and ignore_fields:
+         raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")
+ 
+     def decorator(function: F) -> F:
+         if inspect.iscoroutinefunction(function):
+             return async_decorator(
+                 function=function,
+                 ttl=ttl,
+                 never_die=never_die,
+                 cache_key_func=cache_key_func,
+                 ignore_fields=ignore_fields,
+                 config=config,
+             )
+         return sync_decorator(
+             function=function,
+             ttl=ttl,
+             never_die=never_die,
+             cache_key_func=cache_key_func,
+             ignore_fields=ignore_fields,
+             config=config,
+         )
+ 
+     return decorator
cachify/utils/functions.py ADDED
@@ -0,0 +1,10 @@
+ import functools
+ from typing import Any, Callable
+ 
+ 
+ @functools.cache
+ def get_function_id(function: Callable[..., Any]) -> str:
+     """
+     Returns the unique identifier for the function, which is a combination of its module and qualified name.
+     """
+     return f"{function.__module__}.{function.__qualname__}"
cachify/utils/locks.py ADDED
@@ -0,0 +1,6 @@
+ import asyncio
+ import threading
+ from collections import defaultdict
+ 
+ ASYNC_LOCKS: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
+ SYNC_LOCKS: defaultdict[str, threading.Lock] = defaultdict(threading.Lock)
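
These registries support per-key mutual exclusion for the in-memory backend: every distinct cache key lazily gets its own lock on first access. A self-contained sketch of the pattern:

```python
import threading
from collections import defaultdict

SYNC_LOCKS: defaultdict[str, threading.Lock] = defaultdict(threading.Lock)

def compute_once(cache_key: str):
    # Two threads computing the same key serialize here; different keys
    # proceed in parallel, since each key gets its own Lock instance.
    with SYNC_LOCKS[cache_key]:
        ...  # check cache, recompute on miss, store
```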