fastapi-cachekit 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fast_cache/__init__.py +13 -0
- fast_cache/backends/__init__.py +0 -0
- fast_cache/backends/backend.py +126 -0
- fast_cache/backends/memcached.py +126 -0
- fast_cache/backends/memory.py +291 -0
- fast_cache/backends/postgres.py +230 -0
- fast_cache/backends/redis.py +257 -0
- fast_cache/integration.py +199 -0
- fastapi_cachekit-0.1.0.dist-info/METADATA +289 -0
- fastapi_cachekit-0.1.0.dist-info/RECORD +12 -0
- fastapi_cachekit-0.1.0.dist-info/WHEEL +5 -0
- fastapi_cachekit-0.1.0.dist-info/top_level.txt +1 -0
fast_cache/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# fast_cache public API: the FastAPI integration plus all bundled backends.
from .integration import FastAPICache
from .backends.backend import CacheBackend

from .backends.redis import RedisBackend
from .backends.memory import InMemoryBackend
from .backends.postgres import PostgresBackend
from .backends.memcached import MemcachedBackend

__all__ = [
    "FastAPICache",
    "RedisBackend",
    "CacheBackend",
    "InMemoryBackend",
    "PostgresBackend",
    "cache",
    "MemcachedBackend",
]

# Module-level cache instance shared across the application.
cache = FastAPICache()
|
|
File without changes
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
from typing import Any, Optional, Union
|
|
3
|
+
from datetime import timedelta
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class CacheBackend(ABC):
    """
    Interface that every cache backend must implement.

    Concrete backends provide paired synchronous and asynchronous variants
    of each operation (``get``/``aget``, ``set``/``aset``, and so on), so
    the same backend can serve both sync and async code paths.
    """

    @abstractmethod
    async def aget(self, key: str) -> Optional[Any]:
        """
        Asynchronously fetch the value stored under *key*.

        Args:
            key (str): The key to look up.

        Returns:
            Optional[Any]: The cached value, or None when absent.
        """
        ...

    @abstractmethod
    def get(self, key: str) -> Optional[Any]:
        """
        Synchronously fetch the value stored under *key*.

        Args:
            key (str): The key to look up.

        Returns:
            Optional[Any]: The cached value, or None when absent.
        """
        ...

    @abstractmethod
    async def aset(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Asynchronously store *value* under *key*.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store.
            expire (Optional[Union[int, timedelta]]): Time-to-live, given
                either in seconds or as a timedelta; None means no expiry.
        """
        ...

    @abstractmethod
    def set(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Synchronously store *value* under *key*.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store.
            expire (Optional[Union[int, timedelta]]): Time-to-live, given
                either in seconds or as a timedelta; None means no expiry.
        """
        ...

    @abstractmethod
    async def adelete(self, key: str) -> None:
        """
        Asynchronously remove *key* from the cache.

        Args:
            key (str): The key to remove.
        """
        ...

    @abstractmethod
    def delete(self, key: str) -> None:
        """
        Synchronously remove *key* from the cache.

        Args:
            key (str): The key to remove.
        """
        ...

    @abstractmethod
    async def aclear(self) -> None:
        """Asynchronously remove every value from the cache."""
        ...

    @abstractmethod
    def clear(self) -> None:
        """Synchronously remove every value from the cache."""
        ...

    @abstractmethod
    async def ahas(self, key: str) -> bool:
        """
        Asynchronously test whether *key* is present.

        Args:
            key (str): The key to test.

        Returns:
            bool: True when the key exists, False otherwise.
        """
        ...

    @abstractmethod
    def has(self, key: str) -> bool:
        """
        Synchronously test whether *key* is present.

        Args:
            key (str): The key to test.

        Returns:
            bool: True when the key exists, False otherwise.
        """
        ...
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import pickle
|
|
2
|
+
from typing import Any, Optional, Union, Mapping
|
|
3
|
+
from datetime import timedelta
|
|
4
|
+
from .backend import CacheBackend
|
|
5
|
+
|
|
6
|
+
class MemcachedBackend(CacheBackend):
    """
    Memcached cache backend with both sync and async support.

    Values are serialized with ``pickle``; cache only trusted data, since
    unpickling attacker-controlled bytes is unsafe.

    All operations are best-effort by design: backend/network errors are
    swallowed, reads fall back to ``None``/``False``, and writes fail
    silently rather than raising into the request path.
    """

    def __init__(
        self,
        host: str,
        port: int,
        *,
        pool_size: int = 10,
        pool_minsize: int = 1,
        namespace: str = "fastapi_cache",
    ) -> None:
        """
        Initialize the Memcached backend.

        Args:
            host (str): Memcached server host.
            port (int): Memcached server port.
            pool_size (int): Maximum number of pooled connections, applied
                to both the sync and async clients.
            pool_minsize (int): Minimum number of pooled connections for
                the async client.
            namespace (str): Prefix applied to every key.

        Raises:
            ImportError: If 'aiomcache' or 'pymemcache' is not installed.
        """
        try:
            import aiomcache
            from pymemcache.client.base import PooledClient
        except ImportError:
            raise ImportError(
                "MemcachedBackend requires 'aiomcache' and 'pymemcache'. "
                "Install with: pip install fast-cache[memcached]"
            )
        self._namespace = namespace
        self._host = host
        self._port = port

        # Sync client. Bug fix: honor the configured pool_size instead of
        # the previously hard-coded max_pool_size=10.
        self._sync_client = PooledClient(
            (host, port),
            max_pool_size=pool_size,
        )
        # Async client, created eagerly with the same pool configuration.
        self._async_client = aiomcache.Client(
            host,
            port,
            pool_size=pool_size,
            pool_minsize=pool_minsize,
        )

    def _make_key(self, key: str) -> bytes:
        """Return the namespaced key as bytes (memcached requires byte keys)."""
        return f"{self._namespace}:{key}".encode()

    @staticmethod
    def _exptime(expire: Optional[Union[int, timedelta]]) -> int:
        """Normalize an expiration to whole seconds; 0 means 'never expire'."""
        if isinstance(expire, timedelta):
            return int(expire.total_seconds())
        return expire or 0

    def get(self, key: str) -> Optional[Any]:
        """
        Synchronously retrieve a value.

        Returns:
            Optional[Any]: The cached value, or None when absent or on
            any backend error (cache misses and failures look alike).
        """
        try:
            value = self._sync_client.get(self._make_key(key))
            return pickle.loads(value) if value else None
        except Exception:
            # Best-effort cache: treat backend failures as a miss.
            return None

    def set(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Synchronously store a value.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store (pickled).
            expire (Optional[Union[int, timedelta]]): TTL in seconds or
                as a timedelta; None/0 stores without expiry.
        """
        try:
            self._sync_client.set(
                self._make_key(key), pickle.dumps(value), expire=self._exptime(expire)
            )
        except Exception:
            pass  # best-effort write

    def delete(self, key: str) -> None:
        """Synchronously delete a value; errors are ignored."""
        try:
            self._sync_client.delete(self._make_key(key))
        except Exception:
            pass

    def clear(self) -> None:
        """
        Synchronously clear the cache.

        Memcached has no per-namespace flush, so this flushes the ENTIRE
        server — including keys written by other applications.
        """
        try:
            self._sync_client.flush_all()
        except Exception:
            pass

    def has(self, key: str) -> bool:
        """Synchronously check for a key; False on miss or backend error."""
        try:
            return self._sync_client.get(self._make_key(key)) is not None
        except Exception:
            return False

    async def aget(self, key: str) -> Optional[Any]:
        """
        Asynchronously retrieve a value.

        Returns:
            Optional[Any]: The cached value, or None when absent or on
            any backend error.
        """
        try:
            value = await self._async_client.get(self._make_key(key))
            return pickle.loads(value) if value else None
        except Exception:
            return None

    async def aset(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Asynchronously store a value.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store (pickled).
            expire (Optional[Union[int, timedelta]]): TTL in seconds or
                as a timedelta; None/0 stores without expiry.
        """
        try:
            await self._async_client.set(
                self._make_key(key), pickle.dumps(value), exptime=self._exptime(expire)
            )
        except Exception:
            pass

    async def adelete(self, key: str) -> None:
        """Asynchronously delete a value; errors are ignored."""
        try:
            await self._async_client.delete(self._make_key(key))
        except Exception:
            pass

    async def aclear(self) -> None:
        """
        Asynchronously clear the cache.

        Like :meth:`clear`, this flushes the whole server, not just this
        namespace.
        """
        try:
            await self._async_client.flush_all()
        except Exception:
            pass

    async def ahas(self, key: str) -> bool:
        """Asynchronously check for a key; False on miss or backend error."""
        try:
            value = await self._async_client.get(self._make_key(key))
            return value is not None
        except Exception:
            return False

    async def close(self) -> None:
        """
        Close both clients.

        Bug fix: each client is closed independently, so a failure while
        closing the async client no longer leaks the sync pool.
        """
        try:
            await self._async_client.close()
        except Exception:
            pass
        try:
            self._sync_client.close()
        except Exception:
            pass
|
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import threading
|
|
3
|
+
import time
|
|
4
|
+
from collections import OrderedDict
|
|
5
|
+
from datetime import timedelta
|
|
6
|
+
from typing import Any, Dict, Optional, Union, Tuple
|
|
7
|
+
from .backend import CacheBackend
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class InMemoryBackend(CacheBackend):
    """
    In-memory cache backend implementation with namespace support,
    thread/async safety, and efficient expiration cleanup.

    Entries are stored as ``(value, expire_time)`` tuples in an
    ``OrderedDict``; insertion/access order drives LRU eviction when
    ``max_size`` is set. Expiration uses ``time.monotonic()``, so
    wall-clock adjustments do not affect TTLs.

    NOTE(review): sync methods guard ``_cache`` with a ``threading.Lock``
    while async methods use an ``asyncio.Lock`` — these do not exclude
    each other, so concurrent sync access from other threads mixed with
    async access is not fully synchronized. Confirm intended usage.

    Attributes:
        _namespace (str): Namespace prefix for all keys.
        _cache (OrderedDict[str, Tuple[Any, Optional[float]]]): The in-memory cache store.
        _lock (threading.Lock): Lock for thread safety.
        _async_lock (asyncio.Lock): Lock for async safety.
        _cleanup_task (Optional[asyncio.Task]): Background cleanup task.
        _max_size (Optional[int]): Maximum number of items (for LRU eviction).
    """

    def __init__(
        self, namespace: str = "fastapi-cache", max_size: Optional[int] = None
    ) -> None:
        """
        Initialize the in-memory cache backend.

        Args:
            namespace (str): Namespace prefix for all keys (default: "fastapi-cache").
            max_size (Optional[int]): Optional maximum number of items (LRU eviction if set).
        """
        self._namespace = namespace
        self._cache: "OrderedDict[str, Tuple[Any, Optional[float]]]" = OrderedDict()
        self._lock = threading.Lock()
        self._async_lock = asyncio.Lock()
        # Lazily started by aset(); never started by the sync API.
        self._cleanup_task: Optional[asyncio.Task] = None
        self._max_size = max_size

    def _make_key(self, key: str) -> str:
        """
        Create a namespaced cache key.

        Args:
            key (str): The original key.

        Returns:
            str: The namespaced key.
        """
        return f"{self._namespace}:{key}"

    def _is_expired(self, expire_time: Optional[float]) -> bool:
        """
        Check if a cache entry is expired.

        Args:
            expire_time (Optional[float]): The expiration timestamp
                (monotonic clock), or None for "never expires".

        Returns:
            bool: True if expired, False otherwise.
        """
        if expire_time is None:
            return False
        return time.monotonic() > expire_time

    def _get_expire_time(
        self, expire: Optional[Union[int, timedelta]]
    ) -> Optional[float]:
        """
        Calculate the expiration timestamp.

        Args:
            expire (Optional[Union[int, timedelta]]): Expiration in seconds or timedelta.

        Returns:
            Optional[float]: The expiration timestamp on the monotonic
            clock, or None if no expiration.
        """
        if expire is None:
            return None
        seconds = expire.total_seconds() if isinstance(expire, timedelta) else expire
        return time.monotonic() + seconds

    def _evict_if_needed(self):
        """
        Evict the least recently used items if the cache exceeds max_size.

        Caller must hold the appropriate lock.
        """
        if self._max_size is not None:
            while len(self._cache) > self._max_size:
                self._cache.popitem(last=False)  # Remove oldest (LRU)

    def _cleanup(self) -> None:
        """
        Remove expired items from the cache.

        Caller must hold the appropriate lock. O(n) over the cache; called
        on every write and periodically by the background task.
        """
        now = time.monotonic()
        keys_to_delete = [
            k
            for k, (_, exp) in list(self._cache.items())
            if exp is not None and now > exp
        ]
        for k in keys_to_delete:
            self._cache.pop(k, None)

    async def _cleanup_expired(self) -> None:
        """
        Periodically clean up expired items in the background.

        Runs forever (every 60 seconds) until cancelled by close().
        """
        while True:
            await asyncio.sleep(60)
            async with self._async_lock:
                self._cleanup()

    def get(self, key: str) -> Optional[Any]:
        """
        Synchronously retrieve a value from the cache.

        Args:
            key (str): The key to retrieve.

        Returns:
            Optional[Any]: The cached value, or None if not found or expired.
        """
        k = self._make_key(key)
        with self._lock:
            item = self._cache.get(k)
            if item:
                value, expire_time = item
                if not self._is_expired(expire_time):
                    # Move to end for LRU
                    self._cache.move_to_end(k)
                    return value
                # Expired: drop it eagerly so it cannot be served again.
                self._cache.pop(k, None)
            return None

    def set(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Synchronously set a value in the cache.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store.
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as timedelta.
        """
        k = self._make_key(key)
        expire_time = self._get_expire_time(expire)
        with self._lock:
            self._cache[k] = (value, expire_time)
            self._cache.move_to_end(k)
            self._evict_if_needed()
            self._cleanup()

    def delete(self, key: str) -> None:
        """
        Synchronously delete a value from the cache.

        Args:
            key (str): The key to delete.
        """
        k = self._make_key(key)
        with self._lock:
            self._cache.pop(k, None)

    def clear(self) -> None:
        """
        Synchronously clear all values from the cache.

        Only keys under this backend's namespace prefix are removed.
        """
        prefix = f"{self._namespace}:"
        with self._lock:
            keys_to_delete = [k for k in self._cache if k.startswith(prefix)]
            for k in keys_to_delete:
                self._cache.pop(k, None)

    def has(self, key: str) -> bool:
        """
        Synchronously check if a key exists in the cache.

        Args:
            key (str): The key to check.

        Returns:
            bool: True if the key exists and is not expired, False otherwise.
        """
        k = self._make_key(key)
        with self._lock:
            item = self._cache.get(k)
            if item:
                _, expire_time = item
                if not self._is_expired(expire_time):
                    # A successful "has" also refreshes LRU position.
                    self._cache.move_to_end(k)
                    return True
                self._cache.pop(k, None)
            return False

    async def aget(self, key: str) -> Optional[Any]:
        """
        Asynchronously retrieve a value from the cache.

        Args:
            key (str): The key to retrieve.

        Returns:
            Optional[Any]: The cached value, or None if not found or expired.
        """
        k = self._make_key(key)
        async with self._async_lock:
            item = self._cache.get(k)
            if item:
                value, expire_time = item
                if not self._is_expired(expire_time):
                    self._cache.move_to_end(k)
                    return value
                self._cache.pop(k, None)
            return None

    async def aset(
        self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
    ) -> None:
        """
        Asynchronously set a value in the cache.

        Also (re)starts the background expiration task if it is not
        running; the task is bound to the event loop that calls this.

        Args:
            key (str): The key under which to store the value.
            value (Any): The value to store.
            expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as timedelta.
        """
        k = self._make_key(key)
        expire_time = self._get_expire_time(expire)
        async with self._async_lock:
            self._cache[k] = (value, expire_time)
            self._cache.move_to_end(k)
            self._evict_if_needed()
            self._cleanup()
        # Start cleanup task if not already running
        if self._cleanup_task is None or self._cleanup_task.done():
            self._cleanup_task = asyncio.create_task(self._cleanup_expired())

    async def adelete(self, key: str) -> None:
        """
        Asynchronously delete a value from the cache.

        Args:
            key (str): The key to delete.
        """
        k = self._make_key(key)
        async with self._async_lock:
            self._cache.pop(k, None)

    async def aclear(self) -> None:
        """
        Asynchronously clear all values from the cache.

        Only keys under this backend's namespace prefix are removed.
        """
        prefix = f"{self._namespace}:"
        async with self._async_lock:
            keys_to_delete = [k for k in self._cache if k.startswith(prefix)]
            for k in keys_to_delete:
                self._cache.pop(k, None)

    async def ahas(self, key: str) -> bool:
        """
        Asynchronously check if a key exists in the cache.

        Args:
            key (str): The key to check.

        Returns:
            bool: True if the key exists and is not expired, False otherwise.
        """
        k = self._make_key(key)
        async with self._async_lock:
            item = self._cache.get(k)
            if item:
                _, expire_time = item
                if not self._is_expired(expire_time):
                    self._cache.move_to_end(k)
                    return True
                self._cache.pop(k, None)
            return False

    async def close(self) -> None:
        """
        Asynchronously close the backend and cancel the cleanup task if running.
        """
        if self._cleanup_task and not self._cleanup_task.done():
            self._cleanup_task.cancel()
            try:
                # Await so the CancelledError is consumed here.
                await self._cleanup_task
            except asyncio.CancelledError:
                pass
|