ab-cache 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ab_cache-0.1.1.dist-info/METADATA +24 -0
- ab_cache-0.1.1.dist-info/RECORD +17 -0
- ab_cache-0.1.1.dist-info/WHEEL +4 -0
- ab_cache-0.1.1.dist-info/licenses/LICENSE +21 -0
- ab_core/cache/__init__.py +0 -0
- ab_core/cache/caches/__init__.py +10 -0
- ab_core/cache/caches/base.py +134 -0
- ab_core/cache/caches/disk.py +407 -0
- ab_core/cache/caches/inmemory.py +371 -0
- ab_core/cache/caches/redis.py +371 -0
- ab_core/cache/caches/template.py +143 -0
- ab_core/cache/codec.py +22 -0
- ab_core/cache/exceptions.py +16 -0
- ab_core/cache/namespace.py +22 -0
- ab_core/cache/schema/__init__.py +0 -0
- ab_core/cache/schema/cache_type.py +8 -0
- ab_core/cache/session_context.py +75 -0
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ab-cache
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Author-email: Matthew Coulter <53892067+mattcoulter7@users.noreply.github.com>
|
|
5
|
+
License-File: LICENSE
|
|
6
|
+
Requires-Python: <4,>=3.12
|
|
7
|
+
Requires-Dist: ab-dependency
|
|
8
|
+
Requires-Dist: diskcache<6,>=5.6.3
|
|
9
|
+
Requires-Dist: orjson<4,>=3.11.2
|
|
10
|
+
Requires-Dist: pydantic<3,>=2.11.7
|
|
11
|
+
Requires-Dist: slugify<0.0.2,>=0.0.1
|
|
12
|
+
Requires-Dist: sqlmodel<0.0.25,>=0.0.24
|
|
13
|
+
Requires-Dist: uuid7<0.2,>=0.1.0
|
|
14
|
+
Provides-Extra: all
|
|
15
|
+
Requires-Dist: redis<7,>=6.4.0; extra == 'all'
|
|
16
|
+
Provides-Extra: redis-async
|
|
17
|
+
Requires-Dist: redis<7,>=6.4.0; extra == 'redis-async'
|
|
18
|
+
Provides-Extra: redis-sync
|
|
19
|
+
Requires-Dist: redis<7,>=6.4.0; extra == 'redis-sync'
|
|
20
|
+
Description-Content-Type: text/markdown
|
|
21
|
+
|
|
22
|
+
# Open Banking, Opened | Cache
|
|
23
|
+
|
|
24
|
+
Cache Package for Open Banking, Opened API packages.
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
ab_core/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
ab_core/cache/codec.py,sha256=F5NcEK0iuI4s1Tld-sgbPNZJHSX8LNhL5iI9xBEEeXo,503
|
|
3
|
+
ab_core/cache/exceptions.py,sha256=y58IFOUCrs3PcoS2UTen-wiIieVmIeUPjaRDNjSgvVU,323
|
|
4
|
+
ab_core/cache/namespace.py,sha256=QDbyOz8gtOfiGTS5RMNHVYFR8eF1tp2bZWtXGPqsUfM,628
|
|
5
|
+
ab_core/cache/session_context.py,sha256=RllVnajJ1IURpdbcfCSd17EV1GYdwzd-Nz531VnhYZI,1859
|
|
6
|
+
ab_core/cache/caches/__init__.py,sha256=Bok3B34ievCDZrI1siSbume2m4Mxioe32M0UR8hIy4o,305
|
|
7
|
+
ab_core/cache/caches/base.py,sha256=kcxXTLaDU-GdxtAna2X-e7GiBlzi6qh6QqUAE43kd-4,3285
|
|
8
|
+
ab_core/cache/caches/disk.py,sha256=Gh3hjrBTONbAHvpR88QndzVZ5tcFLebdrJJu2_cYIv0,15097
|
|
9
|
+
ab_core/cache/caches/inmemory.py,sha256=F7QltcRl86BI-wKyTgXOwrmxjLuLfof83iRldFL_pPE,12331
|
|
10
|
+
ab_core/cache/caches/redis.py,sha256=FN5YZb597Pzo4KxncODsqVvpWFa_ZAhItx4GRgwe_HE,11914
|
|
11
|
+
ab_core/cache/caches/template.py,sha256=3vuhEoQZQW4b1_LO4XMMF3nCtDV7j1px6lQS0FKmwOk,4890
|
|
12
|
+
ab_core/cache/schema/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
13
|
+
ab_core/cache/schema/cache_type.py,sha256=VdQDSUaJ9X0Uclg78GH_UYNN__-brUdR7rM7faCcKZw,143
|
|
14
|
+
ab_cache-0.1.1.dist-info/METADATA,sha256=0XVeSUZzBLmLazVoULt6jN6TFjTVFR_rPOjaOg8oSN4,785
|
|
15
|
+
ab_cache-0.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
16
|
+
ab_cache-0.1.1.dist-info/licenses/LICENSE,sha256=REBFOD_HdKi9ZZ38nP3A_NjpLFaijF1A6_PDiMglbLk,1072
|
|
17
|
+
ab_cache-0.1.1.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Matthew Coulter
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
File without changes
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from typing import Annotated, Union
|
|
2
|
+
|
|
3
|
+
from pydantic import Discriminator
|
|
4
|
+
|
|
5
|
+
from .disk import DiskCache
|
|
6
|
+
from .inmemory import InMemoryCache
|
|
7
|
+
from .redis import RedisCache
|
|
8
|
+
from .template import TemplateCache
|
|
9
|
+
|
|
10
|
+
# Discriminated union of every concrete cache backend. Pydantic selects the
# implementation at validation time from each model's `type` literal field.
Cache = Annotated[Union[RedisCache, TemplateCache, InMemoryCache, DiskCache], Discriminator("type")]
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
from typing import (
|
|
3
|
+
Any,
|
|
4
|
+
AsyncContextManager,
|
|
5
|
+
ContextManager,
|
|
6
|
+
Generic,
|
|
7
|
+
Optional,
|
|
8
|
+
Self,
|
|
9
|
+
TypeVar,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
from pydantic import BaseModel, Field
|
|
13
|
+
|
|
14
|
+
from ab_core.cache.namespace import CacheNamespace
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class CacheSession(BaseModel, ABC):
    """
    Abstract synchronous cache session.

    Concrete implementations apply `namespace` to every key and provide
    get/set/delete, glob-pattern key operations, TTL management, and an
    atomic integer `increment`.  Usable as a context manager: `__exit__`
    calls `close()`.
    """

    # Key namespace prepended to every cache key by implementations.
    namespace: CacheNamespace = Field(
        default_factory=CacheNamespace,
    )

    @abstractmethod
    def get(self, key: str): ...

    @abstractmethod
    def set(self, key: str, value, expiry: Optional[int] = None) -> bool: ...

    @abstractmethod
    def set_if_not_exists(self, key: str, value, expiry: Optional[int] = None) -> bool: ...

    @abstractmethod
    def delete(self, key: str) -> int: ...

    @abstractmethod
    def increment(
        self,
        key: str,
        *,
        increment_by: int = 1,
        initial_value: Optional[int] = None,
        expiry: Optional[int] = None,
    ) -> int: ...

    @abstractmethod
    def get_keys(self, pattern: str = "*"): ...

    @abstractmethod
    def delete_keys(self, pattern: str = "*") -> int: ...

    @abstractmethod
    def get_ttl(self, key: str) -> int: ...

    @abstractmethod
    def expire(self, key: str, ttl: int) -> bool: ...

    @abstractmethod
    def close(self) -> None: ...

    def __enter__(self: Self) -> Self:
        # No setup required; sessions are constructed ready to use.
        return self

    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
        # Always release the underlying backend handle on scope exit.
        self.close()
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class CacheAsyncSession(BaseModel, ABC):
    """
    Abstract asynchronous cache session.

    Coroutine mirror of `CacheSession`: namespaced keys, get/set/delete,
    glob-pattern key operations, TTL management, and atomic `increment`.
    Usable as an async context manager: `__aexit__` awaits `close()`.
    """

    # Key namespace prepended to every cache key by implementations.
    namespace: CacheNamespace = Field(
        default_factory=CacheNamespace,
    )

    @abstractmethod
    async def get(self, key: str): ...

    @abstractmethod
    async def set(self, key: str, value, expiry: Optional[int] = None) -> bool: ...

    @abstractmethod
    async def set_if_not_exists(self, key: str, value, expiry: Optional[int] = None) -> bool: ...

    @abstractmethod
    async def delete(self, key: str) -> int: ...

    @abstractmethod
    async def increment(
        self,
        key: str,
        *,
        increment_by: int = 1,
        initial_value: Optional[int] = None,
        expiry: Optional[int] = None,
    ) -> int: ...

    @abstractmethod
    async def get_keys(self, pattern: str = "*"): ...

    @abstractmethod
    async def delete_keys(self, pattern: str = "*") -> int: ...

    @abstractmethod
    async def get_ttl(self, key: str) -> int: ...

    @abstractmethod
    async def expire(self, key: str, ttl: int) -> bool: ...

    @abstractmethod
    async def close(self) -> None: ...

    async def __aenter__(self: Self) -> Self:
        # No setup required; sessions are constructed ready to use.
        return self

    async def __aexit__(self, type_: Any, value: Any, traceback: Any) -> None:
        # Always release the underlying backend handle on scope exit.
        await self.close()
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# Type variables binding a concrete CacheBase subclass to the session
# classes its `sync_session` / `async_session` factories produce.
SYNC_SESSION = TypeVar("SYNC_SESSION", bound=CacheSession)
ASYNC_SESSION = TypeVar("ASYNC_SESSION", bound=CacheAsyncSession)
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
class CacheBase(BaseModel, Generic[SYNC_SESSION, ASYNC_SESSION], ABC):
    """
    Abstract cache factory.

    Concrete backends implement `sync_session` / `async_session` as context
    managers yielding their session types.  Passing `current_session` lets
    callers reuse an already-open session instead of opening a new one.
    """

    # Namespace propagated to every session this factory creates.
    namespace: CacheNamespace = Field(default_factory=CacheNamespace)

    @abstractmethod
    def sync_session(
        self,
        *,
        current_session: Optional[SYNC_SESSION] = None,
    ) -> ContextManager[SYNC_SESSION]: ...

    @abstractmethod
    async def async_session(
        self,
        *,
        current_session: Optional[ASYNC_SESSION] = None,
    ) -> AsyncContextManager[ASYNC_SESSION]: ...
|
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import fnmatch
|
|
3
|
+
import time
|
|
4
|
+
from contextlib import asynccontextmanager, contextmanager
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import AsyncIterator, Iterator, Literal, Optional, override
|
|
7
|
+
|
|
8
|
+
import diskcache # pip install diskcache
|
|
9
|
+
from pydantic import ConfigDict, Field
|
|
10
|
+
|
|
11
|
+
from ab_core.cache.codec import DecodedT, safe_decode, safe_encode
|
|
12
|
+
from ab_core.cache.exceptions import GenericCacheReadError, GenericCacheWriteError
|
|
13
|
+
|
|
14
|
+
from ..schema.cache_type import CacheType
|
|
15
|
+
from .base import CacheAsyncSession, CacheBase, CacheSession
|
|
16
|
+
|
|
17
|
+
# ────────────────────────────────────────────────────────────────────────────────
|
|
18
|
+
# Sync Session
|
|
19
|
+
# ────────────────────────────────────────────────────────────────────────────────
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class DiskCacheSyncSession(CacheSession):
    """
    Synchronous disk-backed session using `diskcache.Cache | diskcache.FanoutCache`.

    - Keys are namespaced.
    - Values are stored as EncodedT (bytes-like) via `safe_encode`.
    - Expiration is handled by DiskCache per-item `expire` seconds.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Underlying diskcache handle; excluded from model serialization.
    cache: diskcache.Cache | diskcache.FanoutCache = Field(..., exclude=True)

    def _get_with_expire_time(self, k: str):
        """Return `(value, expire_time)` for `k`, or None if missing/expired."""
        # Returns `None` if missing, else a tuple (value, expire_time)
        _missing = object()
        res = self.cache.get(k, default=_missing, expire_time=True)
        return None if res is _missing else res  # type: ignore[return-value]

    @override
    def get(self, key: str) -> DecodedT:
        """Decode and return the value stored at `key`; raise KeyError when absent."""
        k = self.namespace.apply(key)
        try:
            res = self._get_with_expire_time(k)
            if res is None:
                raise KeyError(f"No data found for key `{k}`")
            value, expire_time = res
            # If value is present but already expired by race, DiskCache returns default.
            return safe_decode(value)
        except KeyError:
            raise
        except Exception as e:
            raise GenericCacheReadError(e) from e

    @override
    def set(self, key: str, value, expiry: Optional[int] = None) -> bool:
        """Store `value` under `key`, optionally expiring after `expiry` seconds."""
        k = self.namespace.apply(key)
        try:
            return bool(self.cache.set(k, safe_encode(value), expire=expiry))
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    def set_if_not_exists(self, key: str, value, expiry: Optional[int] = None) -> bool:
        """Store `value` only when `key` is absent; return True if stored."""
        k = self.namespace.apply(key)
        try:
            # `add` stores only if missing
            return bool(self.cache.add(k, safe_encode(value), expire=expiry))
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    def delete(self, key: str) -> int:
        """Delete `key`; return the number of keys removed (0 or 1)."""
        k = self.namespace.apply(key)
        try:
            return 1 if self.cache.delete(k) else 0
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    def increment(
        self,
        key: str,
        *,
        increment_by: int = 1,
        initial_value: Optional[int] = None,
        expiry: Optional[int] = None,
    ) -> int:
        """
        Atomically increment the integer stored at `key`.

        Missing keys are initialised to `initial_value` (default 0) with
        `expiry`.  Existing keys keep their remaining TTL, mirroring Redis
        INCR semantics.
        """
        k = self.namespace.apply(key)
        try:
            # mirror in-memory semantics (store integer as utf-8 bytes)
            with self.cache.transact():
                res = self._get_with_expire_time(k)
                if res is None:
                    new_val = int(initial_value or 0)
                    self.cache.set(k, str(new_val).encode("utf-8"), expire=expiry)
                    return new_val
                value, expire_time = res
                try:
                    current = int(safe_decode(value))
                except Exception as conv:
                    raise GenericCacheWriteError(f"Value for key `{k}` is not an integer") from conv
                new_val = current + int(increment_by)
                # Preserve the key's remaining TTL (parity with Redis INCR).
                # BUGFIX: a bare `set` without `expire` stores the item with
                # no expiration, silently clearing any existing TTL.
                if expire_time is None:
                    self.cache.set(k, str(new_val).encode("utf-8"))
                else:
                    self.cache.set(
                        k,
                        str(new_val).encode("utf-8"),
                        expire=max(expire_time - time.time(), 0.0),
                    )
                return new_val
        except GenericCacheWriteError:
            raise
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    def get_keys(self, pattern: str = "*") -> list[str]:
        """Return live (non-expired) keys matching glob `pattern`, namespace stripped."""
        pat = self.namespace.apply(pattern)
        try:
            out: list[str] = []
            # iterkeys() may include expired; verify liveness via get(...)
            for k in self.cache.iterkeys():
                if not isinstance(k, str):
                    continue
                if not fnmatch.fnmatch(k, pat):
                    continue
                res = self._get_with_expire_time(k)
                if res is None:
                    continue  # missing/expired
                out.append(self.namespace.strip(k))
            return out
        except Exception as e:
            raise GenericCacheReadError(e) from e

    @override
    def delete_keys(self, pattern: str = "*") -> int:
        """Delete every key matching glob `pattern`; return the count deleted."""
        pat = self.namespace.apply(pattern)
        try:
            deleted = 0
            # Snapshot the keys so deletion does not disturb iteration.
            for k in list(self.cache.iterkeys()):
                if isinstance(k, str) and fnmatch.fnmatch(k, pat):
                    if self.cache.delete(k):
                        deleted += 1
            return deleted
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    def get_ttl(self, key: str) -> int:
        """Return remaining TTL in seconds, -1 when no expiry; KeyError when absent."""
        k = self.namespace.apply(key)
        try:
            res = self._get_with_expire_time(k)
            if res is None:
                raise KeyError(f"No data found for key `{k}`")
            _, exp = res
            if exp is None:
                return -1  # no expiration
            ttl = int(exp - time.time())
            if ttl < 0:
                # already expired in-between; treat as missing now
                self.cache.delete(k)
                raise KeyError(f"No data found for key `{k}`")
            return ttl
        except KeyError:
            raise
        except Exception as e:
            raise GenericCacheReadError(e) from e

    @override
    def expire(self, key: str, ttl: int) -> bool:
        """Set `key`'s TTL to `ttl` seconds; return True when the key exists."""
        k = self.namespace.apply(key)
        try:
            return bool(self.cache.touch(k, expire=int(ttl)))
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    def close(self) -> None:
        """Close the underlying diskcache handle; best-effort, never raises."""
        try:
            self.cache.close()
        except Exception:
            pass
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
# ────────────────────────────────────────────────────────────────────────────────
|
|
183
|
+
# Async Session (wrap sync DiskCache with to_thread)
|
|
184
|
+
# ────────────────────────────────────────────────────────────────────────────────
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
class DiskCacheAsyncSession(CacheAsyncSession):
    """
    Async facade for `diskcache.Cache | diskcache.FanoutCache` wrapped with `asyncio.to_thread`.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Underlying diskcache handle; excluded from model serialization.
    cache: diskcache.Cache | diskcache.FanoutCache = Field(..., exclude=True)

    def _ns(self, key: str) -> str:
        """Apply the session namespace to `key`."""
        return self.namespace.apply(key)

    @override
    async def get(self, key: str) -> DecodedT:
        """Decode and return the value stored at `key`; raise KeyError when absent."""
        k = self._ns(key)

        def _work():
            _missing = object()
            res = self.cache.get(k, default=_missing, expire_time=True)
            if res is _missing:
                raise KeyError(f"No data found for key `{k}`")
            value, _ = res
            return safe_decode(value)

        try:
            return await asyncio.to_thread(_work)
        except KeyError:
            raise
        except Exception as e:
            raise GenericCacheReadError(e) from e

    @override
    async def set(self, key: str, value, expiry: Optional[int] = None) -> bool:
        """Store `value` under `key`, optionally expiring after `expiry` seconds."""
        k = self._ns(key)
        try:
            # Pass `expire` by keyword and normalise to bool for parity with
            # the sync session's declared return type.
            return bool(await asyncio.to_thread(self.cache.set, k, safe_encode(value), expire=expiry))
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    async def set_if_not_exists(self, key: str, value, expiry: Optional[int] = None) -> bool:
        """Store `value` only when `key` is absent; return True if stored."""
        k = self._ns(key)
        try:
            # `add` stores only if missing
            return bool(await asyncio.to_thread(self.cache.add, k, safe_encode(value), expire=expiry))
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    async def delete(self, key: str) -> int:
        """Delete `key`; return the number of keys removed (0 or 1)."""
        k = self._ns(key)
        try:
            ok = await asyncio.to_thread(self.cache.delete, k)
            return 1 if ok else 0
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    async def increment(
        self,
        key: str,
        *,
        increment_by: int = 1,
        initial_value: Optional[int] = None,
        expiry: Optional[int] = None,
    ) -> int:
        """
        Atomically increment the integer stored at `key`.

        Missing keys are initialised to `initial_value` (default 0) with
        `expiry`.  Existing keys keep their remaining TTL, mirroring Redis
        INCR semantics.
        """
        k = self._ns(key)

        def _work() -> int:
            with self.cache.transact():
                _missing = object()
                res = self.cache.get(k, default=_missing, expire_time=True)
                if res is _missing:
                    new_val = int(initial_value or 0)
                    self.cache.set(k, str(new_val).encode("utf-8"), expire=expiry)
                    return new_val
                value, expire_time = res
                try:
                    current = int(safe_decode(value))
                except Exception as conv:
                    raise GenericCacheWriteError(f"Value for key `{k}` is not an integer") from conv
                new_val = current + int(increment_by)
                # Preserve the key's remaining TTL (parity with Redis INCR).
                # BUGFIX: a bare `set` without `expire` stores the item with
                # no expiration, silently clearing any existing TTL.
                if expire_time is None:
                    self.cache.set(k, str(new_val).encode("utf-8"))
                else:
                    self.cache.set(
                        k,
                        str(new_val).encode("utf-8"),
                        expire=max(expire_time - time.time(), 0.0),
                    )
                return new_val

        try:
            return await asyncio.to_thread(_work)
        except GenericCacheWriteError:
            raise
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    async def get_keys(self, pattern: str = "*") -> list[str]:
        """Return live (non-expired) keys matching glob `pattern`, namespace stripped."""
        pat = self.namespace.apply(pattern)

        def _work() -> list[str]:
            out: list[str] = []
            for k in self.cache.iterkeys():
                if not isinstance(k, str):
                    continue
                if not fnmatch.fnmatch(k, pat):
                    continue
                _missing = object()
                res = self.cache.get(k, default=_missing, expire_time=True)
                if res is _missing:
                    continue  # missing/expired
                out.append(self.namespace.strip(k))
            return out

        try:
            return await asyncio.to_thread(_work)
        except Exception as e:
            raise GenericCacheReadError(e) from e

    @override
    async def delete_keys(self, pattern: str = "*") -> int:
        """Delete every key matching glob `pattern`; return the count deleted."""
        pat = self.namespace.apply(pattern)

        def _work() -> int:
            deleted = 0
            # Snapshot the keys so deletion does not disturb iteration.
            for k in list(self.cache.iterkeys()):
                if isinstance(k, str) and fnmatch.fnmatch(k, pat):
                    if self.cache.delete(k):
                        deleted += 1
            return deleted

        try:
            return await asyncio.to_thread(_work)
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    async def get_ttl(self, key: str) -> int:
        """Return remaining TTL in seconds, -1 when no expiry; KeyError when absent."""
        k = self._ns(key)

        def _work() -> int:
            _missing = object()
            res = self.cache.get(k, default=_missing, expire_time=True)
            if res is _missing:
                raise KeyError(f"No data found for key `{k}`")
            _, exp = res
            if exp is None:
                return -1
            ttl = int(exp - time.time())
            if ttl < 0:
                # already expired in-between; treat as missing now
                self.cache.delete(k)
                raise KeyError(f"No data found for key `{k}`")
            return ttl

        try:
            return await asyncio.to_thread(_work)
        except KeyError:
            raise
        except Exception as e:
            raise GenericCacheReadError(e) from e

    @override
    async def expire(self, key: str, ttl: int) -> bool:
        """Set `key`'s TTL to `ttl` seconds; return True when the key exists."""
        k = self._ns(key)
        try:
            return bool(await asyncio.to_thread(self.cache.touch, k, int(ttl)))
        except Exception as e:
            raise GenericCacheWriteError(e) from e

    @override
    async def close(self) -> None:
        """Close the underlying diskcache handle; best-effort, never raises."""
        try:
            await asyncio.to_thread(self.cache.close)
        except Exception:
            pass
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
# ────────────────────────────────────────────────────────────────────────────────
|
|
360
|
+
# Cache factory
|
|
361
|
+
# ────────────────────────────────────────────────────────────────────────────────
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
class DiskCache(CacheBase[DiskCacheSyncSession, DiskCacheAsyncSession]):
    """
    Factory for disk-backed cache sessions built on `diskcache`.

    Opens a fresh `diskcache.Cache` (or `FanoutCache` when `fanout` is set)
    per session unless the caller supplies an existing one.
    """

    # Adjust enum name if your CacheType differs, e.g. DISKCACHE or DISK_CACHE
    type: Literal[CacheType.DISK] = CacheType.DISK

    # Where to persist on disk (e.g. a container volume path)
    directory: Path = Field(..., description="Directory for diskcache storage")
    timeout: int = Field(default=60, description="Timeout for path cache")

    # Fanout options
    fanout: bool = Field(False, description="Use diskcache.FanoutCache instead of Cache")
    shards: int = Field(8, description="Number of shards when using FanoutCache")

    def _new_cache(self) -> diskcache.Cache | diskcache.FanoutCache:
        """Open a new diskcache handle according to the configured options."""
        if not self.fanout:
            return diskcache.Cache(self.directory, timeout=self.timeout)
        return diskcache.FanoutCache(self.directory, timeout=self.timeout, shards=self.shards)

    @override
    @contextmanager
    def sync_session(
        self,
        *,
        current_session: Optional[DiskCacheSyncSession] = None,
    ) -> Iterator[DiskCacheSyncSession]:
        """Yield `current_session` if given, else open and later close a fresh one."""
        if current_session:
            # Reuse the caller's session; its lifetime is the caller's problem.
            yield current_session
            return
        fresh = DiskCacheSyncSession(namespace=self.namespace, cache=self._new_cache())
        with fresh:
            yield fresh

    @override
    @asynccontextmanager
    async def async_session(
        self,
        *,
        current_session: Optional[DiskCacheAsyncSession] = None,
    ) -> AsyncIterator[DiskCacheAsyncSession]:
        """Yield `current_session` if given, else open and later close a fresh one."""
        if current_session:
            # Reuse the caller's session; its lifetime is the caller's problem.
            yield current_session
            return
        fresh = DiskCacheAsyncSession(namespace=self.namespace, cache=self._new_cache())
        async with fresh:
            yield fresh