cachify 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachify/__init__.py +24 -22
- cachify/cache.py +116 -116
- cachify/config/__init__.py +4 -4
- cachify/features/never_die.py +219 -219
- cachify/memory_cache.py +37 -37
- cachify/redis/__init__.py +19 -19
- cachify/redis/config.py +115 -115
- cachify/redis/lock.py +232 -232
- cachify/redis_cache.py +27 -27
- cachify/storage/__init__.py +9 -9
- cachify/storage/memory_storage.py +52 -52
- cachify/storage/redis_storage.py +138 -138
- cachify/types/__init__.py +95 -95
- cachify/utils/arguments.py +65 -65
- cachify/utils/decorator_factory.py +44 -44
- cachify/utils/functions.py +10 -10
- cachify/utils/locks.py +6 -6
- {cachify-0.1.0.dist-info → cachify-0.2.0.dist-info}/METADATA +4 -3
- cachify-0.2.0.dist-info/RECORD +24 -0
- {cachify-0.1.0.dist-info → cachify-0.2.0.dist-info}/WHEEL +1 -1
- {cachify-0.1.0.dist-info → cachify-0.2.0.dist-info/licenses}/LICENSE +21 -21
- cachify-0.1.0.dist-info/RECORD +0 -24
- {cachify-0.1.0.dist-info → cachify-0.2.0.dist-info}/entry_points.txt +0 -0
cachify/storage/redis_storage.py
CHANGED
|
@@ -1,138 +1,138 @@
|
|
|
1
|
-
import pickle
|
|
2
|
-
import time
|
|
3
|
-
from typing import Any, overload
|
|
4
|
-
|
|
5
|
-
from cachify.redis.config import get_redis_config
|
|
6
|
-
from cachify.config import logger
|
|
7
|
-
from cachify.types import CacheEntry, Number
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
class RedisCacheEntry(CacheEntry):
|
|
11
|
-
@classmethod
|
|
12
|
-
def time(cls) -> float:
|
|
13
|
-
return time.time()
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
class RedisStorage:
|
|
17
|
-
"""Redis cache storage implementing CacheStorage protocol."""
|
|
18
|
-
|
|
19
|
-
@classmethod
|
|
20
|
-
def _make_key(cls, cache_key: str) -> str:
|
|
21
|
-
"""Create a Redis key from cache_key."""
|
|
22
|
-
config = get_redis_config()
|
|
23
|
-
return f"{config.key_prefix}:{cache_key}"
|
|
24
|
-
|
|
25
|
-
@classmethod
|
|
26
|
-
def _serialize(cls, entry: RedisCacheEntry) -> bytes:
|
|
27
|
-
"""Serialize a cache entry to bytes."""
|
|
28
|
-
try:
|
|
29
|
-
return pickle.dumps(entry, protocol=pickle.HIGHEST_PROTOCOL)
|
|
30
|
-
except (pickle.PicklingError, TypeError, AttributeError) as exc:
|
|
31
|
-
raise TypeError(
|
|
32
|
-
f"Failed to serialize cache entry. Object of type {type(entry.result).__name__} "
|
|
33
|
-
f"cannot be pickled. Ensure the cached result is serializable."
|
|
34
|
-
) from exc
|
|
35
|
-
|
|
36
|
-
@classmethod
|
|
37
|
-
def _deserialize(cls, data: bytes) -> RedisCacheEntry:
|
|
38
|
-
"""Deserialize bytes to a cache entry."""
|
|
39
|
-
return pickle.loads(data)
|
|
40
|
-
|
|
41
|
-
@overload
|
|
42
|
-
@classmethod
|
|
43
|
-
def _prepare_set(cls, cache_key: str, result: Any, ttl: None) -> tuple[str, bytes, None]: ...
|
|
44
|
-
|
|
45
|
-
@overload
|
|
46
|
-
@classmethod
|
|
47
|
-
def _prepare_set(cls, cache_key: str, result: Any, ttl: Number) -> tuple[str, bytes, int]: ...
|
|
48
|
-
|
|
49
|
-
@classmethod
|
|
50
|
-
def _prepare_set(cls, cache_key: str, result: Any, ttl: Number | None) -> tuple[str, bytes, int | None]:
|
|
51
|
-
"""Prepare key, data, and expiry in milliseconds for set operations."""
|
|
52
|
-
key = cls._make_key(cache_key)
|
|
53
|
-
data = cls._serialize(RedisCacheEntry(result, ttl))
|
|
54
|
-
if ttl is None:
|
|
55
|
-
return key, data, None
|
|
56
|
-
|
|
57
|
-
return key, data, int(ttl * 1000)
|
|
58
|
-
|
|
59
|
-
@classmethod
|
|
60
|
-
def _handle_error(cls, exc: Exception, operation: str, cache_key: str):
|
|
61
|
-
"""Handle Redis errors based on config."""
|
|
62
|
-
config = get_redis_config()
|
|
63
|
-
if config.on_error == "raise":
|
|
64
|
-
raise
|
|
65
|
-
|
|
66
|
-
logger.debug(f"Redis operation error: {exc}", extra={"operation": operation, "cache_key": cache_key})
|
|
67
|
-
|
|
68
|
-
@classmethod
|
|
69
|
-
def _handle_get_result(cls, data: bytes | None) -> RedisCacheEntry | None:
|
|
70
|
-
"""Process get result and return entry if valid."""
|
|
71
|
-
if data is None:
|
|
72
|
-
return None
|
|
73
|
-
|
|
74
|
-
entry = cls._deserialize(data)
|
|
75
|
-
if entry.is_expired():
|
|
76
|
-
return None
|
|
77
|
-
|
|
78
|
-
return entry
|
|
79
|
-
|
|
80
|
-
@classmethod
|
|
81
|
-
def set(cls, cache_key: str, result: Any, ttl: Number | None):
|
|
82
|
-
"""Store a result in Redis cache."""
|
|
83
|
-
config = get_redis_config()
|
|
84
|
-
client = config.get_client(is_async=False)
|
|
85
|
-
key, data, expiry_ms = cls._prepare_set(cache_key, result, ttl)
|
|
86
|
-
try:
|
|
87
|
-
if expiry_ms is None:
|
|
88
|
-
client.set(key, data)
|
|
89
|
-
return
|
|
90
|
-
|
|
91
|
-
client.psetex(key, expiry_ms, data)
|
|
92
|
-
except Exception as exc:
|
|
93
|
-
cls._handle_error(exc, "set", cache_key)
|
|
94
|
-
|
|
95
|
-
@classmethod
|
|
96
|
-
def get(cls, cache_key: str, skip_cache: bool) -> RedisCacheEntry | None:
|
|
97
|
-
"""Retrieve a cache entry from Redis."""
|
|
98
|
-
if skip_cache:
|
|
99
|
-
return None
|
|
100
|
-
|
|
101
|
-
config = get_redis_config()
|
|
102
|
-
client = config.get_client(is_async=False)
|
|
103
|
-
key = cls._make_key(cache_key)
|
|
104
|
-
try:
|
|
105
|
-
return cls._handle_get_result(client.get(key)) # type: ignore[arg-type]
|
|
106
|
-
except Exception as exc:
|
|
107
|
-
cls._handle_error(exc, "get", cache_key)
|
|
108
|
-
return None
|
|
109
|
-
|
|
110
|
-
@classmethod
|
|
111
|
-
async def aset(cls, cache_key: str, result: Any, ttl: Number | None):
|
|
112
|
-
"""Store a result in Redis cache (async)."""
|
|
113
|
-
config = get_redis_config()
|
|
114
|
-
client = config.get_client(is_async=True)
|
|
115
|
-
key, data, expiry_ms = cls._prepare_set(cache_key, result, ttl)
|
|
116
|
-
try:
|
|
117
|
-
if expiry_ms is None:
|
|
118
|
-
await client.set(key, data)
|
|
119
|
-
return
|
|
120
|
-
|
|
121
|
-
await client.psetex(key, expiry_ms, data)
|
|
122
|
-
except Exception as exc:
|
|
123
|
-
cls._handle_error(exc, "aset", cache_key)
|
|
124
|
-
|
|
125
|
-
@classmethod
|
|
126
|
-
async def aget(cls, cache_key: str, skip_cache: bool) -> RedisCacheEntry | None:
|
|
127
|
-
"""Retrieve a cache entry from Redis (async)."""
|
|
128
|
-
if skip_cache:
|
|
129
|
-
return None
|
|
130
|
-
|
|
131
|
-
config = get_redis_config()
|
|
132
|
-
client = config.get_client(is_async=True)
|
|
133
|
-
key = cls._make_key(cache_key)
|
|
134
|
-
try:
|
|
135
|
-
return cls._handle_get_result(await client.get(key)) # type: ignore[arg-type]
|
|
136
|
-
except Exception as exc:
|
|
137
|
-
cls._handle_error(exc, "aget", cache_key)
|
|
138
|
-
return None
|
|
1
|
+
import pickle
|
|
2
|
+
import time
|
|
3
|
+
from typing import Any, overload
|
|
4
|
+
|
|
5
|
+
from cachify.redis.config import get_redis_config
|
|
6
|
+
from cachify.config import logger
|
|
7
|
+
from cachify.types import CacheEntry, Number
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class RedisCacheEntry(CacheEntry):
    """CacheEntry variant whose expiry clock is wall-clock ``time.time()``.

    The base CacheEntry uses ``time.monotonic()``, whose values are only
    comparable within a single process. Entries stored in Redis are pickled
    and may be read back by a different process — NOTE(review): presumably
    why wall-clock time is used here; confirm against CacheEntry's expiry
    bookkeeping (cached_at/expires_at are computed via this clock).
    """

    @classmethod
    def time(cls) -> float:
        # Wall-clock seconds since the epoch (cross-process comparable).
        return time.time()
14
|
+
|
|
15
|
+
|
|
16
|
+
class RedisStorage:
    """Redis cache storage implementing CacheStorage protocol.

    All methods are classmethods: the class acts as a stateless namespace and
    the Redis client, key prefix, and error policy are looked up from the
    active redis config on every call. Entries are pickled RedisCacheEntry
    objects; TTLs are applied server-side via PSETEX (millisecond precision).
    """

    @classmethod
    def _make_key(cls, cache_key: str) -> str:
        """Create a Redis key from cache_key."""
        config = get_redis_config()
        # The prefix namespaces this library's keys inside a shared Redis.
        return f"{config.key_prefix}:{cache_key}"

    @classmethod
    def _serialize(cls, entry: RedisCacheEntry) -> bytes:
        """Serialize a cache entry to bytes.

        Raises:
            TypeError: when the entry (typically its ``result``) cannot be
                pickled; chains the original pickling error as the cause.
        """
        try:
            return pickle.dumps(entry, protocol=pickle.HIGHEST_PROTOCOL)
        except (pickle.PicklingError, TypeError, AttributeError) as exc:
            raise TypeError(
                f"Failed to serialize cache entry. Object of type {type(entry.result).__name__} "
                f"cannot be pickled. Ensure the cached result is serializable."
            ) from exc

    @classmethod
    def _deserialize(cls, data: bytes) -> RedisCacheEntry:
        """Deserialize bytes to a cache entry.

        NOTE(review): ``pickle.loads`` on data read back from Redis is safe
        only if the Redis instance is trusted — confirm for deployments.
        """
        return pickle.loads(data)

    @overload
    @classmethod
    def _prepare_set(cls, cache_key: str, result: Any, ttl: None) -> tuple[str, bytes, None]: ...

    @overload
    @classmethod
    def _prepare_set(cls, cache_key: str, result: Any, ttl: Number) -> tuple[str, bytes, int]: ...

    @classmethod
    def _prepare_set(cls, cache_key: str, result: Any, ttl: Number | None) -> tuple[str, bytes, int | None]:
        """Prepare key, data, and expiry in milliseconds for set operations."""
        key = cls._make_key(cache_key)
        data = cls._serialize(RedisCacheEntry(result, ttl))
        if ttl is None:
            return key, data, None

        # PSETEX takes milliseconds, preserving sub-second TTL resolution.
        return key, data, int(ttl * 1000)

    @classmethod
    def _handle_error(cls, exc: Exception, operation: str, cache_key: str):
        """Handle Redis errors based on config.

        Must be called from inside an ``except`` block: when the config's
        ``on_error`` policy is "raise", the bare ``raise`` re-raises the
        exception currently being handled in the caller. Otherwise the error
        is only logged at debug level and execution continues.
        """
        config = get_redis_config()
        if config.on_error == "raise":
            raise

        logger.debug(f"Redis operation error: {exc}", extra={"operation": operation, "cache_key": cache_key})

    @classmethod
    def _handle_get_result(cls, data: bytes | None) -> RedisCacheEntry | None:
        """Process get result and return entry if valid (present and unexpired)."""
        if data is None:
            return None

        entry = cls._deserialize(data)
        # Client-side expiry check on top of Redis' server-side TTL.
        if entry.is_expired():
            return None

        return entry

    @classmethod
    def set(cls, cache_key: str, result: Any, ttl: Number | None):
        """Store a result in Redis cache.

        Redis errors are re-raised or swallowed+logged per config.on_error.
        Serialization failures raise TypeError regardless of that policy,
        since _prepare_set runs before the try block.
        """
        config = get_redis_config()
        client = config.get_client(is_async=False)
        key, data, expiry_ms = cls._prepare_set(cache_key, result, ttl)
        try:
            if expiry_ms is None:
                # No TTL: plain SET, entry persists until evicted/overwritten.
                client.set(key, data)
                return

            client.psetex(key, expiry_ms, data)
        except Exception as exc:
            cls._handle_error(exc, "set", cache_key)

    @classmethod
    def get(cls, cache_key: str, skip_cache: bool) -> RedisCacheEntry | None:
        """Retrieve a cache entry from Redis.

        Returns None when skip_cache is set, the key is missing or expired,
        or a Redis error occurred and config.on_error is not "raise".
        """
        if skip_cache:
            return None

        config = get_redis_config()
        client = config.get_client(is_async=False)
        key = cls._make_key(cache_key)
        try:
            return cls._handle_get_result(client.get(key))  # type: ignore[arg-type]
        except Exception as exc:
            cls._handle_error(exc, "get", cache_key)
            # Reached only when _handle_error logged instead of re-raising.
            return None

    @classmethod
    async def aset(cls, cache_key: str, result: Any, ttl: Number | None):
        """Store a result in Redis cache (async). Mirrors ``set``."""
        config = get_redis_config()
        client = config.get_client(is_async=True)
        key, data, expiry_ms = cls._prepare_set(cache_key, result, ttl)
        try:
            if expiry_ms is None:
                await client.set(key, data)
                return

            await client.psetex(key, expiry_ms, data)
        except Exception as exc:
            cls._handle_error(exc, "aset", cache_key)

    @classmethod
    async def aget(cls, cache_key: str, skip_cache: bool) -> RedisCacheEntry | None:
        """Retrieve a cache entry from Redis (async). Mirrors ``get``."""
        if skip_cache:
            return None

        config = get_redis_config()
        client = config.get_client(is_async=True)
        key = cls._make_key(cache_key)
        try:
            return cls._handle_get_result(await client.get(key))  # type: ignore[arg-type]
        except Exception as exc:
            cls._handle_error(exc, "aget", cache_key)
            # Reached only when _handle_error logged instead of re-raising.
            return None
cachify/types/__init__.py
CHANGED
|
@@ -1,95 +1,95 @@
|
|
|
1
|
-
import time
|
|
2
|
-
from dataclasses import dataclass, field
|
|
3
|
-
from typing import Any, AsyncContextManager, Callable, ContextManager, Hashable, Protocol, TypeAlias, TypedDict, TypeVar
|
|
4
|
-
|
|
5
|
-
Number: TypeAlias = int | float
|
|
6
|
-
CacheKeyFunction: TypeAlias = Callable[[tuple, dict], Hashable]
|
|
7
|
-
|
|
8
|
-
F = TypeVar("F", bound=Callable[..., Any])
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
@dataclass
|
|
12
|
-
class CacheEntry:
|
|
13
|
-
"""Base cache entry with TTL and expiration tracking."""
|
|
14
|
-
|
|
15
|
-
result: Any
|
|
16
|
-
ttl: float | None
|
|
17
|
-
|
|
18
|
-
cached_at: float = field(init=False)
|
|
19
|
-
expires_at: float = field(init=False)
|
|
20
|
-
|
|
21
|
-
@classmethod
|
|
22
|
-
def time(cls) -> float:
|
|
23
|
-
return time.monotonic()
|
|
24
|
-
|
|
25
|
-
def __post_init__(self):
|
|
26
|
-
self.cached_at = self.time()
|
|
27
|
-
self.expires_at = 0 if self.ttl is None else self.cached_at + self.ttl
|
|
28
|
-
|
|
29
|
-
def is_expired(self) -> bool:
|
|
30
|
-
if self.ttl is None:
|
|
31
|
-
return False
|
|
32
|
-
|
|
33
|
-
return self.time() > self.expires_at
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
@dataclass(frozen=True, slots=True)
|
|
37
|
-
class CacheConfig:
|
|
38
|
-
"""Configuration for cache, grouping storage, lock, and never_die registration."""
|
|
39
|
-
|
|
40
|
-
storage: "CacheStorage"
|
|
41
|
-
sync_lock: Callable[[str], ContextManager]
|
|
42
|
-
async_lock: Callable[[str], AsyncContextManager]
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
class CacheEntryProtocol(Protocol):
|
|
46
|
-
"""Protocol for cache entry objects."""
|
|
47
|
-
|
|
48
|
-
result: Any
|
|
49
|
-
|
|
50
|
-
def is_expired(self) -> bool: ...
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
class CacheStorage(Protocol):
|
|
54
|
-
"""Protocol defining the interface for cache storage."""
|
|
55
|
-
|
|
56
|
-
def get(self, cache_key: str, skip_cache: bool) -> CacheEntryProtocol | None:
|
|
57
|
-
"""Retrieve a cache entry. Returns None if not found, expired, or skip_cache is True."""
|
|
58
|
-
...
|
|
59
|
-
|
|
60
|
-
def set(self, cache_key: str, result: Any, ttl: Number | None):
|
|
61
|
-
"""Store a result in the cache with optional TTL."""
|
|
62
|
-
...
|
|
63
|
-
|
|
64
|
-
async def aget(self, cache_key: str, skip_cache: bool) -> CacheEntryProtocol | None:
|
|
65
|
-
"""Async version of get."""
|
|
66
|
-
...
|
|
67
|
-
|
|
68
|
-
async def aset(self, cache_key: str, result: Any, ttl: Number | None):
|
|
69
|
-
"""Async version of set."""
|
|
70
|
-
...
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
class CacheKwargs(TypedDict, total=False):
|
|
74
|
-
"""
|
|
75
|
-
### Description
|
|
76
|
-
This type can be used in conjuction with `Unpack` to provide static type
|
|
77
|
-
checking for the parameters added by the `@cache()` decorator.
|
|
78
|
-
|
|
79
|
-
This type is completely optional and `skip_cache` will work regardless
|
|
80
|
-
of what static type checkers complain about.
|
|
81
|
-
|
|
82
|
-
### Example
|
|
83
|
-
```
|
|
84
|
-
@cache()
|
|
85
|
-
def function_with_cache(**_: Unpack[CacheKwargs]): ...
|
|
86
|
-
|
|
87
|
-
# pylance/pyright should not complain
|
|
88
|
-
function_with_cache(skip_cache=True)
|
|
89
|
-
```
|
|
90
|
-
|
|
91
|
-
### Notes
|
|
92
|
-
Prior to Python 3.11, `Unpack` is only available with typing_extensions
|
|
93
|
-
"""
|
|
94
|
-
|
|
95
|
-
skip_cache: bool
|
|
1
|
+
import time
|
|
2
|
+
from dataclasses import dataclass, field
|
|
3
|
+
from typing import Any, AsyncContextManager, Callable, ContextManager, Hashable, Protocol, TypeAlias, TypedDict, TypeVar
|
|
4
|
+
|
|
5
|
+
Number: TypeAlias = int | float
|
|
6
|
+
CacheKeyFunction: TypeAlias = Callable[[tuple, dict], Hashable]
|
|
7
|
+
|
|
8
|
+
F = TypeVar("F", bound=Callable[..., Any])
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
|
|
12
|
+
class CacheEntry:
|
|
13
|
+
"""Base cache entry with TTL and expiration tracking."""
|
|
14
|
+
|
|
15
|
+
result: Any
|
|
16
|
+
ttl: float | None
|
|
17
|
+
|
|
18
|
+
cached_at: float = field(init=False)
|
|
19
|
+
expires_at: float = field(init=False)
|
|
20
|
+
|
|
21
|
+
@classmethod
|
|
22
|
+
def time(cls) -> float:
|
|
23
|
+
return time.monotonic()
|
|
24
|
+
|
|
25
|
+
def __post_init__(self):
|
|
26
|
+
self.cached_at = self.time()
|
|
27
|
+
self.expires_at = 0 if self.ttl is None else self.cached_at + self.ttl
|
|
28
|
+
|
|
29
|
+
def is_expired(self) -> bool:
|
|
30
|
+
if self.ttl is None:
|
|
31
|
+
return False
|
|
32
|
+
|
|
33
|
+
return self.time() > self.expires_at
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass(frozen=True, slots=True)
class CacheConfig:
    """Configuration for cache, grouping storage, lock, and never_die registration.

    Frozen and slotted: instances are immutable value objects.
    """

    # Backend persisting cache entries (any object satisfying CacheStorage).
    storage: "CacheStorage"
    # Factory mapping a cache key to a sync context manager guarding that key.
    sync_lock: Callable[[str], ContextManager]
    # Factory mapping a cache key to an async context manager guarding that key.
    async_lock: Callable[[str], AsyncContextManager]
43
|
+
|
|
44
|
+
|
|
45
|
+
class CacheEntryProtocol(Protocol):
    """Protocol for cache entry objects."""

    # The cached return value of the wrapped call.
    result: Any

    # True once the entry's TTL (if any) has elapsed.
    def is_expired(self) -> bool: ...
51
|
+
|
|
52
|
+
|
|
53
|
+
class CacheStorage(Protocol):
    """Protocol defining the interface for cache storage.

    Implementations provide paired sync (get/set) and async (aget/aset)
    operations with identical semantics.
    """

    def get(self, cache_key: str, skip_cache: bool) -> CacheEntryProtocol | None:
        """Retrieve a cache entry. Returns None if not found, expired, or skip_cache is True."""
        ...

    def set(self, cache_key: str, result: Any, ttl: Number | None):
        """Store a result in the cache with optional TTL."""
        ...

    async def aget(self, cache_key: str, skip_cache: bool) -> CacheEntryProtocol | None:
        """Async version of get."""
        ...

    async def aset(self, cache_key: str, result: Any, ttl: Number | None):
        """Async version of set."""
        ...
71
|
+
|
|
72
|
+
|
|
73
|
+
class CacheKwargs(TypedDict, total=False):
    """
    ### Description
    This type can be used in conjunction with `Unpack` to provide static type
    checking for the parameters added by the `@cache()` decorator.

    This type is completely optional and `skip_cache` will work regardless
    of what static type checkers complain about.

    ### Example
    ```
    @cache()
    def function_with_cache(**_: Unpack[CacheKwargs]): ...

    # pylance/pyright should not complain
    function_with_cache(skip_cache=True)
    ```

    ### Notes
    Prior to Python 3.11, `Unpack` is only available with typing_extensions
    """

    # When True, the decorated call bypasses the cache lookup for that call.
    skip_cache: bool
cachify/utils/arguments.py
CHANGED
|
@@ -1,65 +1,65 @@
|
|
|
1
|
-
import hashlib
|
|
2
|
-
import inspect
|
|
3
|
-
import pickle
|
|
4
|
-
from collections.abc import Callable, Generator
|
|
5
|
-
from inspect import Signature
|
|
6
|
-
from typing import Any
|
|
7
|
-
|
|
8
|
-
from cachify.types import CacheKeyFunction
|
|
9
|
-
from cachify.utils.functions import get_function_id
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
def _cache_key_fingerprint(value: object) -> str:
|
|
13
|
-
payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
|
|
14
|
-
return hashlib.blake2b(payload, digest_size=16).hexdigest()
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
def _iter_arguments(
|
|
18
|
-
function_signature: Signature,
|
|
19
|
-
args: tuple,
|
|
20
|
-
kwargs: dict,
|
|
21
|
-
ignore_fields: tuple[str, ...],
|
|
22
|
-
) -> Generator[Any, None, None]:
|
|
23
|
-
bound = function_signature.bind_partial(*args, **kwargs)
|
|
24
|
-
bound.apply_defaults()
|
|
25
|
-
|
|
26
|
-
for name, value in bound.arguments.items():
|
|
27
|
-
if name in ignore_fields:
|
|
28
|
-
continue
|
|
29
|
-
|
|
30
|
-
param = function_signature.parameters[name]
|
|
31
|
-
|
|
32
|
-
# Positional variable arguments can just be yielded like so
|
|
33
|
-
if param.kind == param.VAR_POSITIONAL:
|
|
34
|
-
yield from value
|
|
35
|
-
continue
|
|
36
|
-
|
|
37
|
-
# Keyword variable arguments need to be unpacked from .items()
|
|
38
|
-
if param.kind == param.VAR_KEYWORD:
|
|
39
|
-
yield from value.items()
|
|
40
|
-
continue
|
|
41
|
-
|
|
42
|
-
yield name, value
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
def create_cache_key(
|
|
46
|
-
function: Callable[..., Any],
|
|
47
|
-
cache_key_func: CacheKeyFunction | None,
|
|
48
|
-
ignore_fields: tuple[str, ...],
|
|
49
|
-
args: tuple,
|
|
50
|
-
kwargs: dict,
|
|
51
|
-
) -> str:
|
|
52
|
-
function_id = get_function_id(function)
|
|
53
|
-
|
|
54
|
-
if not cache_key_func:
|
|
55
|
-
function_signature = inspect.signature(function)
|
|
56
|
-
items = tuple(_iter_arguments(function_signature, args, kwargs, ignore_fields))
|
|
57
|
-
return f"{function_id}:{_cache_key_fingerprint(items)}"
|
|
58
|
-
|
|
59
|
-
cache_key = cache_key_func(args, kwargs)
|
|
60
|
-
try:
|
|
61
|
-
return f"{function_id}:{_cache_key_fingerprint(cache_key)}"
|
|
62
|
-
except TypeError as exc:
|
|
63
|
-
raise ValueError(
|
|
64
|
-
"Cache key function must return a hashable cache key - be careful with mutable types (list, dict, set) and non built-in types"
|
|
65
|
-
) from exc
|
|
1
|
+
import hashlib
|
|
2
|
+
import inspect
|
|
3
|
+
import pickle
|
|
4
|
+
from collections.abc import Callable, Generator
|
|
5
|
+
from inspect import Signature
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from cachify.types import CacheKeyFunction
|
|
9
|
+
from cachify.utils.functions import get_function_id
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _cache_key_fingerprint(value: object) -> str:
|
|
13
|
+
payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
|
|
14
|
+
return hashlib.blake2b(payload, digest_size=16).hexdigest()
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _iter_arguments(
|
|
18
|
+
function_signature: Signature,
|
|
19
|
+
args: tuple,
|
|
20
|
+
kwargs: dict,
|
|
21
|
+
ignore_fields: tuple[str, ...],
|
|
22
|
+
) -> Generator[Any, None, None]:
|
|
23
|
+
bound = function_signature.bind_partial(*args, **kwargs)
|
|
24
|
+
bound.apply_defaults()
|
|
25
|
+
|
|
26
|
+
for name, value in bound.arguments.items():
|
|
27
|
+
if name in ignore_fields:
|
|
28
|
+
continue
|
|
29
|
+
|
|
30
|
+
param = function_signature.parameters[name]
|
|
31
|
+
|
|
32
|
+
# Positional variable arguments can just be yielded like so
|
|
33
|
+
if param.kind == param.VAR_POSITIONAL:
|
|
34
|
+
yield from value
|
|
35
|
+
continue
|
|
36
|
+
|
|
37
|
+
# Keyword variable arguments need to be unpacked from .items()
|
|
38
|
+
if param.kind == param.VAR_KEYWORD:
|
|
39
|
+
yield from value.items()
|
|
40
|
+
continue
|
|
41
|
+
|
|
42
|
+
yield name, value
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def create_cache_key(
    function: Callable[..., Any],
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    args: tuple,
    kwargs: dict,
) -> str:
    """Build the cache key for a call to *function*.

    Without a custom ``cache_key_func`` the key is a fingerprint of the bound
    call arguments (minus ``ignore_fields``); otherwise the custom function's
    return value is fingerprinted instead. Either way the key is prefixed with
    the function's identity so different functions never collide.

    Raises:
        ValueError: when the custom cache key cannot be pickled for
            fingerprinting.
    """
    function_id = get_function_id(function)

    if not cache_key_func:
        function_signature = inspect.signature(function)
        items = tuple(_iter_arguments(function_signature, args, kwargs, ignore_fields))
        return f"{function_id}:{_cache_key_fingerprint(items)}"

    cache_key = cache_key_func(args, kwargs)
    try:
        return f"{function_id}:{_cache_key_fingerprint(cache_key)}"
    # The fingerprint pickles the key, so catch the full set of errors
    # pickle.dumps raises (PicklingError, TypeError, AttributeError — e.g.
    # lambdas or locally-defined objects), matching _serialize's handling;
    # previously only TypeError was caught, letting the others escape raw.
    except (pickle.PicklingError, TypeError, AttributeError) as exc:
        raise ValueError(
            "Cache key function must return a picklable cache key - be careful with unpicklable values (lambdas, locks, open files) and non built-in types"
        ) from exc