fastapi-cachekit 0.1.5__tar.gz → 0.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fastapi_cachekit-0.1.5/fastapi_cachekit.egg-info → fastapi_cachekit-0.2.0}/PKG-INFO +2 -1
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/__init__.py +4 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/backend.py +6 -4
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/dynamodb.py +52 -33
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/google_firestore.py +13 -12
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/memcached.py +37 -28
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/memory.py +19 -18
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/mongodb.py +10 -8
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/postgres.py +21 -12
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/redis.py +163 -24
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/integration.py +86 -10
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0/fastapi_cachekit.egg-info}/PKG-INFO +2 -1
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/pyproject.toml +13 -1
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/LICENSE.md +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/README.md +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fast_cache/backends/__init__.py +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fastapi_cachekit.egg-info/SOURCES.txt +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fastapi_cachekit.egg-info/dependency_links.txt +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fastapi_cachekit.egg-info/requires.txt +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fastapi_cachekit.egg-info/top_level.txt +0 -0
- {fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/setup.cfg +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: fastapi-cachekit
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.2.0
|
|
4
4
|
Summary: High-performance caching solution for FastAPI applications
|
|
5
5
|
Author-email: Bijay Nayak <bijay6779@gmail.com>
|
|
6
6
|
License-Expression: MIT
|
|
@@ -18,6 +18,7 @@ Classifier: Programming Language :: Python :: 3.10
|
|
|
18
18
|
Classifier: Programming Language :: Python :: 3.11
|
|
19
19
|
Classifier: Programming Language :: Python :: 3.12
|
|
20
20
|
Classifier: Programming Language :: Python :: 3.13
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
21
22
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
22
23
|
Classifier: Topic :: Internet :: WWW/HTTP
|
|
23
24
|
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
|
@@ -1,6 +1,10 @@
|
|
|
1
|
+
from importlib.metadata import version
|
|
2
|
+
|
|
1
3
|
from .integration import FastAPICache
|
|
2
4
|
from .backends.backend import CacheBackend
|
|
3
5
|
|
|
6
|
+
__version__ = version("fastapi-cachekit")
|
|
7
|
+
|
|
4
8
|
from .backends.redis import RedisBackend
|
|
5
9
|
from .backends.memory import InMemoryBackend
|
|
6
10
|
from .backends.postgres import PostgresBackend
|
|
@@ -12,28 +12,30 @@ class CacheBackend(ABC):
|
|
|
12
12
|
"""
|
|
13
13
|
|
|
14
14
|
@abstractmethod
|
|
15
|
-
async def aget(self, key: str) ->
|
|
15
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
16
16
|
"""
|
|
17
17
|
Asynchronously retrieve a value from the cache.
|
|
18
18
|
|
|
19
19
|
Args:
|
|
20
20
|
key (str): The key to retrieve.
|
|
21
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
21
22
|
|
|
22
23
|
Returns:
|
|
23
|
-
|
|
24
|
+
Any: The cached value, or default if not found.
|
|
24
25
|
"""
|
|
25
26
|
pass
|
|
26
27
|
|
|
27
28
|
@abstractmethod
|
|
28
|
-
def get(self, key: str) ->
|
|
29
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
29
30
|
"""
|
|
30
31
|
Synchronously retrieve a value from the cache.
|
|
31
32
|
|
|
32
33
|
Args:
|
|
33
34
|
key (str): The key to retrieve.
|
|
35
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
34
36
|
|
|
35
37
|
Returns:
|
|
36
|
-
|
|
38
|
+
Any: The cached value, or default if not found.
|
|
37
39
|
"""
|
|
38
40
|
pass
|
|
39
41
|
|
|
@@ -1,4 +1,6 @@
|
|
|
1
|
+
import asyncio
|
|
1
2
|
import hashlib
|
|
3
|
+
import logging
|
|
2
4
|
from typing import Any, Optional, Union
|
|
3
5
|
from datetime import timedelta
|
|
4
6
|
import pickle
|
|
@@ -6,6 +8,8 @@ import time
|
|
|
6
8
|
|
|
7
9
|
from .backend import CacheBackend
|
|
8
10
|
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
9
13
|
|
|
10
14
|
class DynamoDBBackend(CacheBackend):
|
|
11
15
|
"""
|
|
@@ -79,23 +83,32 @@ class DynamoDBBackend(CacheBackend):
|
|
|
79
83
|
self._async_resource = None
|
|
80
84
|
self._async_table = None
|
|
81
85
|
self._async_session = aioboto3.Session()
|
|
86
|
+
self._async_table_lock = asyncio.Lock()
|
|
82
87
|
|
|
83
88
|
# Create table if requested
|
|
84
89
|
if create_table:
|
|
85
90
|
self._ensure_table_exists()
|
|
86
91
|
|
|
87
92
|
async def _get_async_table(self):
|
|
88
|
-
if self._async_table is None:
|
|
89
|
-
|
|
93
|
+
if self._async_table is not None:
|
|
94
|
+
return self._async_table
|
|
95
|
+
async with self._async_table_lock:
|
|
96
|
+
if self._async_table is not None:
|
|
97
|
+
return self._async_table
|
|
98
|
+
# Create the resource context manager
|
|
90
99
|
self._async_resource = self._async_session.resource(
|
|
91
100
|
"dynamodb", **self._connection_params
|
|
92
101
|
)
|
|
93
102
|
|
|
94
|
-
# Enter the context and get the actual resource
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
103
|
+
# Enter the context and get the actual resource,
|
|
104
|
+
# ensuring cleanup on failure to prevent leaks
|
|
105
|
+
try:
|
|
106
|
+
actual_resource = await self._async_resource.__aenter__()
|
|
107
|
+
self._async_table = await actual_resource.Table(self._table_name)
|
|
108
|
+
except BaseException:
|
|
109
|
+
await self._async_resource.__aexit__(None, None, None)
|
|
110
|
+
self._async_resource = None
|
|
111
|
+
raise
|
|
99
112
|
|
|
100
113
|
return self._async_table
|
|
101
114
|
|
|
@@ -233,60 +246,64 @@ class DynamoDBBackend(CacheBackend):
|
|
|
233
246
|
|
|
234
247
|
return item
|
|
235
248
|
|
|
236
|
-
def get(self, key: str) ->
|
|
249
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
237
250
|
"""
|
|
238
251
|
Synchronously retrieve a value from the cache.
|
|
239
252
|
|
|
240
253
|
Args:
|
|
241
254
|
key (str): The key to retrieve.
|
|
255
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
242
256
|
|
|
243
257
|
Returns:
|
|
244
|
-
|
|
258
|
+
Any: The cached value, or default if not found.
|
|
245
259
|
"""
|
|
246
260
|
try:
|
|
247
261
|
response = self._sync_table.get_item(Key={"cache_key": self._make_key(key)})
|
|
248
262
|
|
|
249
263
|
if "Item" not in response:
|
|
250
|
-
return
|
|
264
|
+
return default
|
|
251
265
|
|
|
252
266
|
item = response["Item"]
|
|
253
267
|
|
|
254
268
|
# Check if item has expired and delete if so
|
|
255
269
|
if self._is_expired(item):
|
|
256
270
|
self.delete(key)
|
|
257
|
-
return
|
|
271
|
+
return default
|
|
258
272
|
value = self._deserialize_value(item["value"])
|
|
259
273
|
return value
|
|
260
|
-
except Exception:
|
|
261
|
-
|
|
274
|
+
except Exception as e:
|
|
275
|
+
logger.warning("Cache get failed: %s", e)
|
|
276
|
+
return default
|
|
262
277
|
|
|
263
|
-
async def aget(self, key: str) ->
|
|
278
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
264
279
|
"""
|
|
265
280
|
Asynchronously retrieve a value from the cache.
|
|
266
281
|
|
|
267
282
|
Args:
|
|
268
283
|
key (str): The key to retrieve.
|
|
284
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
269
285
|
|
|
270
286
|
Returns:
|
|
271
|
-
|
|
287
|
+
Any: The cached value, or default if not found.
|
|
272
288
|
"""
|
|
273
289
|
try:
|
|
274
290
|
table = await self._get_async_table()
|
|
275
291
|
response = await table.get_item(Key={"cache_key": self._make_key(key)})
|
|
276
292
|
|
|
277
293
|
if "Item" not in response:
|
|
278
|
-
return
|
|
294
|
+
return default
|
|
279
295
|
|
|
280
296
|
item = response["Item"]
|
|
281
297
|
|
|
282
298
|
# Check if item has expired and delete if so
|
|
283
299
|
if self._is_expired(item):
|
|
284
300
|
await self.adelete(key)
|
|
285
|
-
return
|
|
301
|
+
return default
|
|
286
302
|
|
|
287
303
|
return self._deserialize_value(item["value"])
|
|
288
|
-
except Exception:
|
|
289
|
-
|
|
304
|
+
except Exception as e:
|
|
305
|
+
logger.warning("Cache aget failed: %s", e)
|
|
306
|
+
return default
|
|
290
307
|
|
|
291
308
|
def set(
|
|
292
309
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -302,8 +319,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
302
319
|
try:
|
|
303
320
|
item = self._build_item(key, value, expire)
|
|
304
321
|
self._sync_table.put_item(Item=item)
|
|
305
|
-
except Exception:
|
|
306
|
-
|
|
322
|
+
except Exception as e:
|
|
323
|
+
logger.warning("Cache set failed: %s", e)
|
|
307
324
|
|
|
308
325
|
async def aset(
|
|
309
326
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -320,8 +337,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
320
337
|
table = await self._get_async_table()
|
|
321
338
|
item = self._build_item(key, value, expire)
|
|
322
339
|
await table.put_item(Item=item)
|
|
323
|
-
except Exception:
|
|
324
|
-
|
|
340
|
+
except Exception as e:
|
|
341
|
+
logger.warning("Cache aset failed: %s", e)
|
|
325
342
|
|
|
326
343
|
def delete(self, key: str) -> None:
|
|
327
344
|
"""
|
|
@@ -332,8 +349,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
332
349
|
"""
|
|
333
350
|
try:
|
|
334
351
|
self._sync_table.delete_item(Key={"cache_key": self._make_key(key)})
|
|
335
|
-
except Exception:
|
|
336
|
-
|
|
352
|
+
except Exception as e:
|
|
353
|
+
logger.warning("Cache delete failed: %s", e)
|
|
337
354
|
|
|
338
355
|
async def adelete(self, key: str) -> None:
|
|
339
356
|
"""
|
|
@@ -345,8 +362,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
345
362
|
try:
|
|
346
363
|
table = await self._get_async_table()
|
|
347
364
|
await table.delete_item(Key={"cache_key": self._make_key(key)})
|
|
348
|
-
except Exception:
|
|
349
|
-
|
|
365
|
+
except Exception as e:
|
|
366
|
+
logger.warning("Cache adelete failed: %s", e)
|
|
350
367
|
|
|
351
368
|
def has(self, key: str) -> bool:
|
|
352
369
|
"""
|
|
@@ -376,7 +393,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
376
393
|
return False
|
|
377
394
|
|
|
378
395
|
return True
|
|
379
|
-
except Exception:
|
|
396
|
+
except Exception as e:
|
|
397
|
+
logger.warning("Cache has failed: %s", e)
|
|
380
398
|
return False
|
|
381
399
|
|
|
382
400
|
async def ahas(self, key: str) -> bool:
|
|
@@ -408,7 +426,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
408
426
|
return False
|
|
409
427
|
|
|
410
428
|
return True
|
|
411
|
-
except Exception:
|
|
429
|
+
except Exception as e:
|
|
430
|
+
logger.warning("Cache ahas failed: %s", e)
|
|
412
431
|
return False
|
|
413
432
|
|
|
414
433
|
def clear(self) -> None:
|
|
@@ -443,8 +462,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
443
462
|
for item in response["Items"]:
|
|
444
463
|
batch.delete_item(Key={"cache_key": item["cache_key"]})
|
|
445
464
|
|
|
446
|
-
except Exception:
|
|
447
|
-
|
|
465
|
+
except Exception as e:
|
|
466
|
+
logger.warning("Cache clear failed: %s", e)
|
|
448
467
|
|
|
449
468
|
async def aclear(self) -> None:
|
|
450
469
|
"""
|
|
@@ -482,8 +501,8 @@ class DynamoDBBackend(CacheBackend):
|
|
|
482
501
|
Key={"cache_key": item["cache_key"]}
|
|
483
502
|
)
|
|
484
503
|
|
|
485
|
-
except Exception:
|
|
486
|
-
|
|
504
|
+
except Exception as e:
|
|
505
|
+
logger.warning("Cache aclear failed: %s", e)
|
|
487
506
|
|
|
488
507
|
async def close(self) -> None:
|
|
489
508
|
"""
|
|
@@ -148,18 +148,19 @@ class FirestoreBackend(CacheBackend):
|
|
|
148
148
|
"""
|
|
149
149
|
return expires_at is not None and expires_at < time.time()
|
|
150
150
|
|
|
151
|
-
def get(self, key: str) ->
|
|
151
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
152
152
|
"""
|
|
153
153
|
Synchronously retrieves a value from the cache by key.
|
|
154
154
|
|
|
155
|
-
If the key does not exist or the entry has expired, returns
|
|
155
|
+
If the key does not exist or the entry has expired, returns default. If the
|
|
156
156
|
entry is expired, it is not automatically deleted.
|
|
157
157
|
|
|
158
158
|
Args:
|
|
159
159
|
key (str): The cache key to retrieve.
|
|
160
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
160
161
|
|
|
161
162
|
Returns:
|
|
162
|
-
|
|
163
|
+
Any: The cached Python object, or default if not found or expired.
|
|
163
164
|
|
|
164
165
|
Notes:
|
|
165
166
|
- The value is deserialized using pickle.
|
|
@@ -176,8 +177,8 @@ class FirestoreBackend(CacheBackend):
|
|
|
176
177
|
try:
|
|
177
178
|
return pickle.loads(data["value"])
|
|
178
179
|
except (pickle.UnpicklingError, KeyError):
|
|
179
|
-
return
|
|
180
|
-
return
|
|
180
|
+
return default
|
|
181
|
+
return default
|
|
181
182
|
|
|
182
183
|
def set(
|
|
183
184
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -268,18 +269,19 @@ class FirestoreBackend(CacheBackend):
|
|
|
268
269
|
return not self._is_expired(data.get("expires_at"))
|
|
269
270
|
return False
|
|
270
271
|
|
|
271
|
-
async def aget(self, key: str) ->
|
|
272
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
272
273
|
"""
|
|
273
274
|
Asynchronously retrieves a value from the cache by key.
|
|
274
275
|
|
|
275
|
-
If the key does not exist or the entry has expired, returns
|
|
276
|
+
If the key does not exist or the entry has expired, returns default. If the
|
|
276
277
|
entry is expired, it is not automatically deleted.
|
|
277
278
|
|
|
278
279
|
Args:
|
|
279
280
|
key (str): The cache key to retrieve.
|
|
281
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
280
282
|
|
|
281
283
|
Returns:
|
|
282
|
-
|
|
284
|
+
Any: The cached Python object, or default if not found or expired.
|
|
283
285
|
|
|
284
286
|
Notes:
|
|
285
287
|
- The value is deserialized using pickle.
|
|
@@ -296,9 +298,8 @@ class FirestoreBackend(CacheBackend):
|
|
|
296
298
|
try:
|
|
297
299
|
return pickle.loads(data["value"])
|
|
298
300
|
except (pickle.UnpicklingError, KeyError):
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
return None
|
|
301
|
+
return default
|
|
302
|
+
return default
|
|
302
303
|
|
|
303
304
|
async def aset(
|
|
304
305
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -499,7 +500,7 @@ class FirestoreBackend(CacheBackend):
|
|
|
499
500
|
count += 1
|
|
500
501
|
if count == 500:
|
|
501
502
|
batch.commit()
|
|
502
|
-
batch = self.
|
|
503
|
+
batch = self._sync_db.batch()
|
|
503
504
|
count = 0
|
|
504
505
|
if count > 0:
|
|
505
506
|
batch.commit()
|
|
@@ -1,8 +1,11 @@
|
|
|
1
|
+
import logging
|
|
1
2
|
import pickle
|
|
2
3
|
from typing import Any, Optional, Union
|
|
3
4
|
from datetime import timedelta
|
|
4
5
|
from .backend import CacheBackend
|
|
5
6
|
|
|
7
|
+
logger = logging.getLogger(__name__)
|
|
8
|
+
|
|
6
9
|
|
|
7
10
|
class MemcachedBackend(CacheBackend):
|
|
8
11
|
"""
|
|
@@ -80,17 +83,18 @@ class MemcachedBackend(CacheBackend):
|
|
|
80
83
|
"""
|
|
81
84
|
return f"{self._namespace}:{key}".encode()
|
|
82
85
|
|
|
83
|
-
def get(self, key: str) ->
|
|
86
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
84
87
|
"""
|
|
85
88
|
Synchronously retrieves a value from the cache by key.
|
|
86
89
|
|
|
87
|
-
If the key does not exist, returns
|
|
90
|
+
If the key does not exist, returns default.
|
|
88
91
|
|
|
89
92
|
Args:
|
|
90
93
|
key (str): The cache key to retrieve.
|
|
94
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
91
95
|
|
|
92
96
|
Returns:
|
|
93
|
-
|
|
97
|
+
Any: The cached Python object, or default if not found.
|
|
94
98
|
|
|
95
99
|
Notes:
|
|
96
100
|
- The value is deserialized using pickle.
|
|
@@ -99,9 +103,10 @@ class MemcachedBackend(CacheBackend):
|
|
|
99
103
|
"""
|
|
100
104
|
try:
|
|
101
105
|
value = self._sync_client.get(self._make_key(key))
|
|
102
|
-
return pickle.loads(value) if value else
|
|
103
|
-
except Exception:
|
|
104
|
-
|
|
106
|
+
return pickle.loads(value) if value else default
|
|
107
|
+
except Exception as e:
|
|
108
|
+
logger.warning("Cache get failed: %s", e)
|
|
109
|
+
return default
|
|
105
110
|
|
|
106
111
|
def set(
|
|
107
112
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -134,8 +139,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
134
139
|
self._sync_client.set(
|
|
135
140
|
self._make_key(key), pickle.dumps(value), expire=exptime
|
|
136
141
|
)
|
|
137
|
-
except Exception:
|
|
138
|
-
|
|
142
|
+
except Exception as e:
|
|
143
|
+
logger.warning("Cache set failed: %s", e)
|
|
139
144
|
|
|
140
145
|
def delete(self, key: str) -> None:
|
|
141
146
|
"""
|
|
@@ -152,8 +157,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
152
157
|
"""
|
|
153
158
|
try:
|
|
154
159
|
self._sync_client.delete(self._make_key(key))
|
|
155
|
-
except Exception:
|
|
156
|
-
|
|
160
|
+
except Exception as e:
|
|
161
|
+
logger.warning("Cache delete failed: %s", e)
|
|
157
162
|
|
|
158
163
|
def clear(self) -> None:
|
|
159
164
|
"""
|
|
@@ -170,8 +175,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
170
175
|
|
|
171
176
|
try:
|
|
172
177
|
self._sync_client.flush_all()
|
|
173
|
-
except Exception:
|
|
174
|
-
|
|
178
|
+
except Exception as e:
|
|
179
|
+
logger.warning("Cache clear failed: %s", e)
|
|
175
180
|
|
|
176
181
|
def has(self, key: str) -> bool:
|
|
177
182
|
"""
|
|
@@ -189,20 +194,22 @@ class MemcachedBackend(CacheBackend):
|
|
|
189
194
|
"""
|
|
190
195
|
try:
|
|
191
196
|
return self._sync_client.get(self._make_key(key)) is not None
|
|
192
|
-
except Exception:
|
|
197
|
+
except Exception as e:
|
|
198
|
+
logger.warning("Cache has failed: %s", e)
|
|
193
199
|
return False
|
|
194
200
|
|
|
195
|
-
async def aget(self, key: str) ->
|
|
201
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
196
202
|
"""
|
|
197
203
|
Asynchronously retrieves a value from the cache by key.
|
|
198
204
|
|
|
199
|
-
If the key does not exist, returns
|
|
205
|
+
If the key does not exist, returns default.
|
|
200
206
|
|
|
201
207
|
Args:
|
|
202
208
|
key (str): The cache key to retrieve.
|
|
209
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
203
210
|
|
|
204
211
|
Returns:
|
|
205
|
-
|
|
212
|
+
Any: The cached Python object, or default if not found.
|
|
206
213
|
|
|
207
214
|
Notes:
|
|
208
215
|
- The value is deserialized using pickle.
|
|
@@ -211,9 +218,10 @@ class MemcachedBackend(CacheBackend):
|
|
|
211
218
|
"""
|
|
212
219
|
try:
|
|
213
220
|
value = await self._async_client.get(self._make_key(key))
|
|
214
|
-
return pickle.loads(value) if value else
|
|
215
|
-
except Exception:
|
|
216
|
-
|
|
221
|
+
return pickle.loads(value) if value else default
|
|
222
|
+
except Exception as e:
|
|
223
|
+
logger.warning("Cache aget failed: %s", e)
|
|
224
|
+
return default
|
|
217
225
|
|
|
218
226
|
async def aset(
|
|
219
227
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -246,8 +254,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
246
254
|
await self._async_client.set(
|
|
247
255
|
self._make_key(key), pickle.dumps(value), exptime=exptime
|
|
248
256
|
)
|
|
249
|
-
except Exception:
|
|
250
|
-
|
|
257
|
+
except Exception as e:
|
|
258
|
+
logger.warning("Cache aset failed: %s", e)
|
|
251
259
|
|
|
252
260
|
async def adelete(self, key: str) -> None:
|
|
253
261
|
"""
|
|
@@ -264,8 +272,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
264
272
|
"""
|
|
265
273
|
try:
|
|
266
274
|
await self._async_client.delete(self._make_key(key))
|
|
267
|
-
except Exception:
|
|
268
|
-
|
|
275
|
+
except Exception as e:
|
|
276
|
+
logger.warning("Cache adelete failed: %s", e)
|
|
269
277
|
|
|
270
278
|
async def aclear(self) -> None:
|
|
271
279
|
"""
|
|
@@ -281,8 +289,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
281
289
|
"""
|
|
282
290
|
try:
|
|
283
291
|
await self._async_client.flush_all()
|
|
284
|
-
except Exception:
|
|
285
|
-
|
|
292
|
+
except Exception as e:
|
|
293
|
+
logger.warning("Cache aclear failed: %s", e)
|
|
286
294
|
|
|
287
295
|
async def ahas(self, key: str) -> bool:
|
|
288
296
|
"""
|
|
@@ -301,7 +309,8 @@ class MemcachedBackend(CacheBackend):
|
|
|
301
309
|
try:
|
|
302
310
|
value = await self._async_client.get(self._make_key(key))
|
|
303
311
|
return value is not None
|
|
304
|
-
except Exception:
|
|
312
|
+
except Exception as e:
|
|
313
|
+
logger.warning("Cache ahas failed: %s", e)
|
|
305
314
|
return False
|
|
306
315
|
|
|
307
316
|
async def close(self) -> None:
|
|
@@ -318,5 +327,5 @@ class MemcachedBackend(CacheBackend):
|
|
|
318
327
|
try:
|
|
319
328
|
await self._async_client.close()
|
|
320
329
|
self._sync_client.close()
|
|
321
|
-
except Exception:
|
|
322
|
-
|
|
330
|
+
except Exception as e:
|
|
331
|
+
logger.warning("Cache close failed: %s", e)
|
|
@@ -123,15 +123,14 @@ class InMemoryBackend(CacheBackend):
|
|
|
123
123
|
- Only entries with a non-null expiration time and an expiration
|
|
124
124
|
time earlier than the current time are deleted.
|
|
125
125
|
"""
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
self._cache.pop(k, None)
|
|
126
|
+
now = time.monotonic()
|
|
127
|
+
keys_to_delete = [
|
|
128
|
+
k
|
|
129
|
+
for k, (_, exp) in list(self._cache.items())
|
|
130
|
+
if exp is not None and now > exp
|
|
131
|
+
]
|
|
132
|
+
for k in keys_to_delete:
|
|
133
|
+
self._cache.pop(k, None)
|
|
135
134
|
|
|
136
135
|
def _make_key(self, key: str) -> str:
|
|
137
136
|
"""
|
|
@@ -206,19 +205,20 @@ class InMemoryBackend(CacheBackend):
|
|
|
206
205
|
while len(self._cache) > self._max_size:
|
|
207
206
|
self._cache.popitem(last=False) # Remove oldest (LRU)
|
|
208
207
|
|
|
209
|
-
def get(self, key: str) ->
|
|
208
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
210
209
|
"""
|
|
211
210
|
Synchronously retrieves a value from the cache by key.
|
|
212
211
|
|
|
213
|
-
If the key does not exist or the entry has expired, returns
|
|
212
|
+
If the key does not exist or the entry has expired, returns default. If the
|
|
214
213
|
entry is expired, it is deleted from the cache (lazy deletion). Accessing
|
|
215
214
|
an item moves it to the end of the LRU order.
|
|
216
215
|
|
|
217
216
|
Args:
|
|
218
217
|
key (str): The cache key to retrieve.
|
|
218
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
219
219
|
|
|
220
220
|
Returns:
|
|
221
|
-
|
|
221
|
+
Any: The cached Python object, or default if not found or expired.
|
|
222
222
|
|
|
223
223
|
Notes:
|
|
224
224
|
- Thread-safe.
|
|
@@ -234,7 +234,7 @@ class InMemoryBackend(CacheBackend):
|
|
|
234
234
|
self._cache.move_to_end(k)
|
|
235
235
|
return value
|
|
236
236
|
self._cache.pop(k, None)
|
|
237
|
-
return
|
|
237
|
+
return default
|
|
238
238
|
|
|
239
239
|
def set(
|
|
240
240
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -326,19 +326,20 @@ class InMemoryBackend(CacheBackend):
|
|
|
326
326
|
self._cache.pop(k, None)
|
|
327
327
|
return False
|
|
328
328
|
|
|
329
|
-
async def aget(self, key: str) ->
|
|
329
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
330
330
|
"""
|
|
331
331
|
Asynchronously retrieves a value from the cache by key.
|
|
332
332
|
|
|
333
|
-
If the key does not exist or the entry has expired, returns
|
|
333
|
+
If the key does not exist or the entry has expired, returns default. If the
|
|
334
334
|
entry is expired, it is deleted from the cache (lazy deletion). Accessing
|
|
335
335
|
an item moves it to the end of the LRU order.
|
|
336
336
|
|
|
337
337
|
Args:
|
|
338
338
|
key (str): The cache key to retrieve.
|
|
339
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
339
340
|
|
|
340
341
|
Returns:
|
|
341
|
-
|
|
342
|
+
Any: The cached Python object, or default if not found or expired.
|
|
342
343
|
|
|
343
344
|
Notes:
|
|
344
345
|
- Asyncio-safe.
|
|
@@ -354,7 +355,7 @@ class InMemoryBackend(CacheBackend):
|
|
|
354
355
|
self._cache.move_to_end(k)
|
|
355
356
|
return value
|
|
356
357
|
self._cache.pop(k, None)
|
|
357
|
-
return
|
|
358
|
+
return default
|
|
358
359
|
|
|
359
360
|
async def aset(
|
|
360
361
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -458,4 +459,4 @@ class InMemoryBackend(CacheBackend):
|
|
|
458
459
|
- The background cleanup scheduler is stopped.
|
|
459
460
|
"""
|
|
460
461
|
self._stop_cleanup_scheduler()
|
|
461
|
-
self._cache
|
|
462
|
+
self._cache.clear()
|
|
@@ -61,23 +61,24 @@ class MongoDBBackend(CacheBackend):
|
|
|
61
61
|
"""
|
|
62
62
|
return f"{self._namespace}:{key}"
|
|
63
63
|
|
|
64
|
-
def get(self, key: str) ->
|
|
64
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
65
65
|
"""
|
|
66
66
|
Synchronously retrieve a value from the cache.
|
|
67
67
|
|
|
68
68
|
Args:
|
|
69
69
|
key (str): The cache key.
|
|
70
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
70
71
|
|
|
71
72
|
Returns:
|
|
72
|
-
|
|
73
|
+
Any: The cached value, or default if not found or expired.
|
|
73
74
|
"""
|
|
74
75
|
doc = self._sync_collection.find_one({"_id": self._make_key(key)})
|
|
75
76
|
if doc and (doc.get("expires_at", float("inf")) > time.time()):
|
|
76
77
|
try:
|
|
77
78
|
return pickle.loads(doc["value"])
|
|
78
79
|
except Exception:
|
|
79
|
-
return
|
|
80
|
-
return
|
|
80
|
+
return default
|
|
81
|
+
return default
|
|
81
82
|
|
|
82
83
|
def set(
|
|
83
84
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -128,23 +129,24 @@ class MongoDBBackend(CacheBackend):
|
|
|
128
129
|
doc = self._sync_collection.find_one({"_id": self._make_key(key)})
|
|
129
130
|
return bool(doc and (doc.get("expires_at", float("inf")) > time.time()))
|
|
130
131
|
|
|
131
|
-
async def aget(self, key: str) ->
|
|
132
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
132
133
|
"""
|
|
133
134
|
Asynchronously retrieve a value from the cache.
|
|
134
135
|
|
|
135
136
|
Args:
|
|
136
137
|
key (str): The cache key.
|
|
138
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
137
139
|
|
|
138
140
|
Returns:
|
|
139
|
-
|
|
141
|
+
Any: The cached value, or default if not found or expired.
|
|
140
142
|
"""
|
|
141
143
|
doc = await self._async_collection.find_one({"_id": self._make_key(key)})
|
|
142
144
|
if doc and (doc.get("expires_at", float("inf")) > time.time()):
|
|
143
145
|
try:
|
|
144
146
|
return pickle.loads(doc["value"])
|
|
145
147
|
except Exception:
|
|
146
|
-
return
|
|
147
|
-
return
|
|
148
|
+
return default
|
|
149
|
+
return default
|
|
148
150
|
|
|
149
151
|
async def aset(
|
|
150
152
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import asyncio
|
|
1
2
|
import pickle
|
|
2
3
|
import re
|
|
3
4
|
import threading
|
|
@@ -91,6 +92,7 @@ class PostgresBackend(CacheBackend):
|
|
|
91
92
|
|
|
92
93
|
self._scheduler = None
|
|
93
94
|
self._scheduler_lock = threading.Lock()
|
|
95
|
+
self._async_pool_lock = asyncio.Lock()
|
|
94
96
|
|
|
95
97
|
if self._auto_cleanup:
|
|
96
98
|
self._start_cleanup_scheduler()
|
|
@@ -236,18 +238,19 @@ class PostgresBackend(CacheBackend):
|
|
|
236
238
|
)
|
|
237
239
|
conn.commit()
|
|
238
240
|
|
|
239
|
-
def get(self, key: str) ->
|
|
241
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
240
242
|
"""
|
|
241
243
|
Retrieves a value from the cache by key.
|
|
242
244
|
|
|
243
|
-
If the key does not exist or the entry has expired, returns
|
|
245
|
+
If the key does not exist or the entry has expired, returns default. If the
|
|
244
246
|
entry is expired, it is deleted from the cache (lazy deletion).
|
|
245
247
|
|
|
246
248
|
Args:
|
|
247
249
|
key (str): The cache key to retrieve.
|
|
250
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
248
251
|
|
|
249
252
|
Returns:
|
|
250
|
-
|
|
253
|
+
Any: The cached Python object, or default if not found or expired.
|
|
251
254
|
|
|
252
255
|
Notes:
|
|
253
256
|
- The value is deserialized using pickle.
|
|
@@ -261,11 +264,11 @@ class PostgresBackend(CacheBackend):
|
|
|
261
264
|
)
|
|
262
265
|
row = cur.fetchone()
|
|
263
266
|
if not row:
|
|
264
|
-
return
|
|
267
|
+
return default
|
|
265
268
|
value, expire_at = row
|
|
266
269
|
if self._is_expired(expire_at):
|
|
267
270
|
self.delete(key) # Lazy delete
|
|
268
|
-
return
|
|
271
|
+
return default
|
|
269
272
|
return pickle.loads(value)
|
|
270
273
|
|
|
271
274
|
def delete(self, key: str) -> None:
|
|
@@ -371,18 +374,19 @@ class PostgresBackend(CacheBackend):
|
|
|
371
374
|
)
|
|
372
375
|
await conn.commit()
|
|
373
376
|
|
|
374
|
-
async def aget(self, key: str) ->
|
|
377
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
375
378
|
"""
|
|
376
379
|
Asynchronously retrieves a value from the cache by key.
|
|
377
380
|
|
|
378
|
-
If the key does not exist or the entry has expired, returns
|
|
381
|
+
If the key does not exist or the entry has expired, returns default. If the
|
|
379
382
|
entry is expired, it is deleted from the cache (lazy deletion).
|
|
380
383
|
|
|
381
384
|
Args:
|
|
382
385
|
key (str): The cache key to retrieve.
|
|
386
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
383
387
|
|
|
384
388
|
Returns:
|
|
385
|
-
|
|
389
|
+
Any: The cached Python object, or default if not found or expired.
|
|
386
390
|
|
|
387
391
|
Notes:
|
|
388
392
|
- Uses the asynchronous connection pool.
|
|
@@ -398,11 +402,11 @@ class PostgresBackend(CacheBackend):
|
|
|
398
402
|
)
|
|
399
403
|
row = await cur.fetchone()
|
|
400
404
|
if not row:
|
|
401
|
-
return
|
|
405
|
+
return default
|
|
402
406
|
value, expire_at = row
|
|
403
407
|
if self._is_expired(expire_at):
|
|
404
408
|
await self.adelete(key) # Lazy delete
|
|
405
|
-
return
|
|
409
|
+
return default
|
|
406
410
|
return pickle.loads(value)
|
|
407
411
|
|
|
408
412
|
async def adelete(self, key: str) -> None:
|
|
@@ -537,10 +541,15 @@ class PostgresBackend(CacheBackend):
|
|
|
537
541
|
Ensures that the asynchronous connection pool is open before use.
|
|
538
542
|
|
|
539
543
|
If the pool is not already open, it is opened asynchronously.
|
|
544
|
+
Uses a lock to prevent concurrent coroutines from racing to open
|
|
545
|
+
the pool simultaneously.
|
|
540
546
|
|
|
541
547
|
Notes:
|
|
542
548
|
- Used internally by all asynchronous methods.
|
|
543
549
|
- Prevents errors from using a closed or uninitialized pool.
|
|
544
550
|
"""
|
|
545
|
-
if
|
|
546
|
-
|
|
551
|
+
if self._async_pool._opened:
|
|
552
|
+
return
|
|
553
|
+
async with self._async_pool_lock:
|
|
554
|
+
if not self._async_pool._opened:
|
|
555
|
+
await self._async_pool.open()
|
|
@@ -1,9 +1,25 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
import time
|
|
4
|
+
import uuid
|
|
1
5
|
from typing import Any, Optional, Union
|
|
2
6
|
from datetime import timedelta
|
|
3
7
|
import pickle
|
|
4
8
|
|
|
5
9
|
from .backend import CacheBackend
|
|
6
10
|
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
# Atomic unlock: only delete if the caller's token still matches.
|
|
14
|
+
# Prevents releasing a lock that expired and was re-acquired by another process.
|
|
15
|
+
_UNLOCK_SCRIPT = """
|
|
16
|
+
if redis.call("get", KEYS[1]) == ARGV[1] then
|
|
17
|
+
return redis.call("del", KEYS[1])
|
|
18
|
+
else
|
|
19
|
+
return 0
|
|
20
|
+
end
|
|
21
|
+
"""
|
|
22
|
+
|
|
7
23
|
|
|
8
24
|
class RedisBackend(CacheBackend):
|
|
9
25
|
"""
|
|
@@ -93,37 +109,41 @@ class RedisBackend(CacheBackend):
|
|
|
93
109
|
break
|
|
94
110
|
return keys
|
|
95
111
|
|
|
96
|
-
async def aget(self, key: str) -> Optional[Any]:
|
|
112
|
+
async def aget(self, key: str, default: Any = None) -> Any:
|
|
97
113
|
"""
|
|
98
114
|
Asynchronously retrieve a value from the cache.
|
|
99
115
|
|
|
100
116
|
Args:
|
|
101
117
|
key (str): The key to retrieve.
|
|
118
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
102
119
|
|
|
103
120
|
Returns:
|
|
104
|
-
|
|
121
|
+
Any: The cached value, or default if not found.
|
|
105
122
|
"""
|
|
106
123
|
try:
|
|
107
124
|
result = await self._async_client.get(self._make_key(key))
|
|
108
|
-
return pickle.loads(result) if result else None
|
|
109
|
-
except Exception:
|
|
110
|
-
|
|
125
|
+
return pickle.loads(result) if result else default
|
|
126
|
+
except Exception as e:
|
|
127
|
+
logger.warning("Cache aget failed: %s", e)
|
|
128
|
+
return default
|
|
111
129
|
|
|
112
|
-
def get(self, key: str) -> Optional[Any]:
|
|
130
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
113
131
|
"""
|
|
114
132
|
Synchronously retrieve a value from the cache.
|
|
115
133
|
|
|
116
134
|
Args:
|
|
117
135
|
key (str): The key to retrieve.
|
|
136
|
+
default (Any): Value to return if key is not found. Defaults to None.
|
|
118
137
|
|
|
119
138
|
Returns:
|
|
120
|
-
|
|
139
|
+
Any: The cached value, or default if not found.
|
|
121
140
|
"""
|
|
122
141
|
try:
|
|
123
142
|
result = self._sync_client.get(self._make_key(key))
|
|
124
|
-
return pickle.loads(result) if result else None
|
|
125
|
-
except Exception:
|
|
126
|
-
|
|
143
|
+
return pickle.loads(result) if result else default
|
|
144
|
+
except Exception as e:
|
|
145
|
+
logger.warning("Cache get failed: %s", e)
|
|
146
|
+
return default
|
|
127
147
|
|
|
128
148
|
async def aset(
|
|
129
149
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -141,8 +161,8 @@ class RedisBackend(CacheBackend):
|
|
|
141
161
|
await self._async_client.set(
|
|
142
162
|
self._make_key(key), pickle.dumps(value), ex=ex
|
|
143
163
|
)
|
|
144
|
-
except Exception:
|
|
145
|
-
|
|
164
|
+
except Exception as e:
|
|
165
|
+
logger.warning("Cache aset failed: %s", e)
|
|
146
166
|
|
|
147
167
|
def set(
|
|
148
168
|
self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
|
|
@@ -158,8 +178,8 @@ class RedisBackend(CacheBackend):
|
|
|
158
178
|
try:
|
|
159
179
|
ex = expire.total_seconds() if isinstance(expire, timedelta) else expire
|
|
160
180
|
self._sync_client.set(self._make_key(key), pickle.dumps(value), ex=ex)
|
|
161
|
-
except Exception:
|
|
162
|
-
|
|
181
|
+
except Exception as e:
|
|
182
|
+
logger.warning("Cache set failed: %s", e)
|
|
163
183
|
|
|
164
184
|
async def adelete(self, key: str) -> None:
|
|
165
185
|
"""
|
|
@@ -170,8 +190,8 @@ class RedisBackend(CacheBackend):
|
|
|
170
190
|
"""
|
|
171
191
|
try:
|
|
172
192
|
await self._async_client.delete(self._make_key(key))
|
|
173
|
-
except Exception:
|
|
174
|
-
|
|
193
|
+
except Exception as e:
|
|
194
|
+
logger.warning("Cache adelete failed: %s", e)
|
|
175
195
|
|
|
176
196
|
def delete(self, key: str) -> None:
|
|
177
197
|
"""
|
|
@@ -182,8 +202,8 @@ class RedisBackend(CacheBackend):
|
|
|
182
202
|
"""
|
|
183
203
|
try:
|
|
184
204
|
self._sync_client.delete(self._make_key(key))
|
|
185
|
-
except Exception:
|
|
186
|
-
|
|
205
|
+
except Exception as e:
|
|
206
|
+
logger.warning("Cache delete failed: %s", e)
|
|
187
207
|
|
|
188
208
|
async def aclear(self) -> None:
|
|
189
209
|
"""
|
|
@@ -193,8 +213,8 @@ class RedisBackend(CacheBackend):
|
|
|
193
213
|
keys = await self._scan_keys()
|
|
194
214
|
if keys:
|
|
195
215
|
await self._async_client.delete(*keys)
|
|
196
|
-
except Exception:
|
|
197
|
-
|
|
216
|
+
except Exception as e:
|
|
217
|
+
logger.warning("Cache aclear failed: %s", e)
|
|
198
218
|
|
|
199
219
|
def clear(self) -> None:
|
|
200
220
|
"""
|
|
@@ -212,8 +232,8 @@ class RedisBackend(CacheBackend):
|
|
|
212
232
|
self._sync_client.delete(*keys)
|
|
213
233
|
if cursor == 0:
|
|
214
234
|
break
|
|
215
|
-
except Exception:
|
|
216
|
-
|
|
235
|
+
except Exception as e:
|
|
236
|
+
logger.warning("Cache clear failed: %s", e)
|
|
217
237
|
|
|
218
238
|
async def ahas(self, key: str) -> bool:
|
|
219
239
|
"""
|
|
@@ -227,7 +247,8 @@ class RedisBackend(CacheBackend):
|
|
|
227
247
|
"""
|
|
228
248
|
try:
|
|
229
249
|
return await self._async_client.exists(self._make_key(key)) > 0
|
|
230
|
-
except Exception:
|
|
250
|
+
except Exception as e:
|
|
251
|
+
logger.warning("Cache ahas failed: %s", e)
|
|
231
252
|
return False
|
|
232
253
|
|
|
233
254
|
def has(self, key: str) -> bool:
|
|
@@ -242,7 +263,125 @@ class RedisBackend(CacheBackend):
|
|
|
242
263
|
"""
|
|
243
264
|
try:
|
|
244
265
|
return self._sync_client.exists(self._make_key(key)) > 0
|
|
245
|
-
except Exception:
|
|
266
|
+
except Exception as e:
|
|
267
|
+
logger.warning("Cache has failed: %s", e)
|
|
268
|
+
return False
|
|
269
|
+
|
|
270
|
+
def acquire_lock(
|
|
271
|
+
self,
|
|
272
|
+
key: str,
|
|
273
|
+
timeout: int = 30,
|
|
274
|
+
wait: float = 5.0,
|
|
275
|
+
poll_interval: float = 0.05,
|
|
276
|
+
) -> Optional[str]:
|
|
277
|
+
"""
|
|
278
|
+
Acquire a distributed lock for stampede protection.
|
|
279
|
+
|
|
280
|
+
Args:
|
|
281
|
+
key: Cache key to lock (lock suffix added internally).
|
|
282
|
+
timeout: Lock auto-expiry in seconds (deadlock protection).
|
|
283
|
+
wait: Max seconds to poll before giving up.
|
|
284
|
+
poll_interval: Sleep between poll attempts in seconds.
|
|
285
|
+
|
|
286
|
+
Returns:
|
|
287
|
+
A token string if acquired, None if the lock could not be
|
|
288
|
+
obtained within the wait period or on Redis failure.
|
|
289
|
+
"""
|
|
290
|
+
lock_key = self._make_key(f"{key}:_lock")
|
|
291
|
+
token = uuid.uuid4().hex
|
|
292
|
+
deadline = time.monotonic() + wait
|
|
293
|
+
|
|
294
|
+
try:
|
|
295
|
+
while True:
|
|
296
|
+
if self._sync_client.set(lock_key, token, nx=True, ex=timeout):
|
|
297
|
+
return token
|
|
298
|
+
if time.monotonic() >= deadline:
|
|
299
|
+
return None
|
|
300
|
+
time.sleep(poll_interval)
|
|
301
|
+
except Exception as e:
|
|
302
|
+
logger.warning("Lock acquire failed: %s", e)
|
|
303
|
+
return None
|
|
304
|
+
|
|
305
|
+
def release_lock(self, key: str, token: str) -> bool:
|
|
306
|
+
"""
|
|
307
|
+
Release a distributed lock only if the caller still owns it.
|
|
308
|
+
|
|
309
|
+
Uses a Lua script to atomically check the token and delete,
|
|
310
|
+
preventing release of a lock re-acquired by another process.
|
|
311
|
+
|
|
312
|
+
Args:
|
|
313
|
+
key: Cache key that was locked.
|
|
314
|
+
token: The token returned by acquire_lock.
|
|
315
|
+
|
|
316
|
+
Returns:
|
|
317
|
+
True if the lock was released, False otherwise.
|
|
318
|
+
"""
|
|
319
|
+
lock_key = self._make_key(f"{key}:_lock")
|
|
320
|
+
try:
|
|
321
|
+
return self._sync_client.eval(_UNLOCK_SCRIPT, 1, lock_key, token) == 1
|
|
322
|
+
except Exception as e:
|
|
323
|
+
logger.warning("Lock release failed: %s", e)
|
|
324
|
+
return False
|
|
325
|
+
|
|
326
|
+
async def aacquire_lock(
|
|
327
|
+
self,
|
|
328
|
+
key: str,
|
|
329
|
+
timeout: int = 30,
|
|
330
|
+
wait: float = 5.0,
|
|
331
|
+
poll_interval: float = 0.05,
|
|
332
|
+
) -> Optional[str]:
|
|
333
|
+
"""
|
|
334
|
+
Asynchronously acquire a distributed lock for stampede protection.
|
|
335
|
+
|
|
336
|
+
Args:
|
|
337
|
+
key: Cache key to lock (lock suffix added internally).
|
|
338
|
+
timeout: Lock auto-expiry in seconds (deadlock protection).
|
|
339
|
+
wait: Max seconds to poll before giving up.
|
|
340
|
+
poll_interval: Sleep between poll attempts in seconds.
|
|
341
|
+
|
|
342
|
+
Returns:
|
|
343
|
+
A token string if acquired, None if the lock could not be
|
|
344
|
+
obtained within the wait period or on Redis failure.
|
|
345
|
+
"""
|
|
346
|
+
lock_key = self._make_key(f"{key}:_lock")
|
|
347
|
+
token = uuid.uuid4().hex
|
|
348
|
+
deadline = time.monotonic() + wait
|
|
349
|
+
|
|
350
|
+
try:
|
|
351
|
+
while True:
|
|
352
|
+
if await self._async_client.set(
|
|
353
|
+
lock_key, token, nx=True, ex=timeout
|
|
354
|
+
):
|
|
355
|
+
return token
|
|
356
|
+
if time.monotonic() >= deadline:
|
|
357
|
+
return None
|
|
358
|
+
await asyncio.sleep(poll_interval)
|
|
359
|
+
except Exception as e:
|
|
360
|
+
logger.warning("Lock aacquire failed: %s", e)
|
|
361
|
+
return None
|
|
362
|
+
|
|
363
|
+
async def arelease_lock(self, key: str, token: str) -> bool:
|
|
364
|
+
"""
|
|
365
|
+
Asynchronously release a distributed lock only if the caller still owns it.
|
|
366
|
+
|
|
367
|
+
Uses a Lua script to atomically check the token and delete,
|
|
368
|
+
preventing release of a lock re-acquired by another process.
|
|
369
|
+
|
|
370
|
+
Args:
|
|
371
|
+
key: Cache key that was locked.
|
|
372
|
+
token: The token returned by aacquire_lock.
|
|
373
|
+
|
|
374
|
+
Returns:
|
|
375
|
+
True if the lock was released, False otherwise.
|
|
376
|
+
"""
|
|
377
|
+
lock_key = self._make_key(f"{key}:_lock")
|
|
378
|
+
try:
|
|
379
|
+
result = await self._async_client.eval(
|
|
380
|
+
_UNLOCK_SCRIPT, 1, lock_key, token
|
|
381
|
+
)
|
|
382
|
+
return result == 1
|
|
383
|
+
except Exception as e:
|
|
384
|
+
logger.warning("Lock arelease failed: %s", e)
|
|
246
385
|
return False
|
|
247
386
|
|
|
248
387
|
async def close(self) -> None:
|
|
@@ -6,6 +6,8 @@ import inspect
|
|
|
6
6
|
from functools import wraps
|
|
7
7
|
from .backends.backend import CacheBackend
|
|
8
8
|
|
|
9
|
+
_CACHE_MISS = object()
|
|
10
|
+
|
|
9
11
|
|
|
10
12
|
class FastAPICache:
|
|
11
13
|
"""
|
|
@@ -42,6 +44,9 @@ class FastAPICache:
|
|
|
42
44
|
expire: Optional[Union[int, timedelta]] = None,
|
|
43
45
|
key_builder: Optional[Callable[..., str]] = None,
|
|
44
46
|
namespace: Optional[str] = None,
|
|
47
|
+
stampede_protection: bool = True,
|
|
48
|
+
lock_timeout: int = 30,
|
|
49
|
+
lock_wait: float = 5.0,
|
|
45
50
|
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
|
|
46
51
|
"""
|
|
47
52
|
Decorator for caching function results.
|
|
@@ -50,6 +55,12 @@ class FastAPICache:
|
|
|
50
55
|
expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as a timedelta.
|
|
51
56
|
key_builder (Optional[Callable[..., str]]): Custom function to build the cache key.
|
|
52
57
|
namespace (Optional[str]): Optional namespace for the cache key.
|
|
58
|
+
stampede_protection (bool): Enable distributed lock to prevent thundering herd
|
|
59
|
+
on cache miss. Requires a backend that supports locking (e.g., Redis).
|
|
60
|
+
Silently skipped for backends without lock support. Defaults to True.
|
|
61
|
+
lock_timeout (int): Lock auto-expiry in seconds (deadlock protection). Defaults to 30.
|
|
62
|
+
lock_wait (float): Max seconds to wait for the lock before falling back
|
|
63
|
+
to a direct function call. Defaults to 5.0.
|
|
53
64
|
|
|
54
65
|
Returns:
|
|
55
66
|
Callable: A decorator that caches the function result.
|
|
@@ -89,6 +100,11 @@ class FastAPICache:
|
|
|
89
100
|
|
|
90
101
|
return key
|
|
91
102
|
|
|
103
|
+
def _backend_supports_locking() -> bool:
|
|
104
|
+
return self._backend is not None and hasattr(
|
|
105
|
+
self._backend, "acquire_lock"
|
|
106
|
+
)
|
|
107
|
+
|
|
92
108
|
@wraps(func)
|
|
93
109
|
async def async_wrapper(*args, **kwargs) -> Any:
|
|
94
110
|
"""
|
|
@@ -109,16 +125,46 @@ class FastAPICache:
|
|
|
109
125
|
return await func(*args, **kwargs)
|
|
110
126
|
|
|
111
127
|
cache_key = build_cache_key(*args, **kwargs)
|
|
128
|
+
effective_expire = expire or self._default_expire
|
|
112
129
|
|
|
113
|
-
#
|
|
114
|
-
cached_value = await self._backend.aget(
|
|
115
|
-
|
|
130
|
+
# Fast path: cache hit
|
|
131
|
+
cached_value = await self._backend.aget(
|
|
132
|
+
cache_key, default=_CACHE_MISS
|
|
133
|
+
)
|
|
134
|
+
if cached_value is not _CACHE_MISS:
|
|
116
135
|
return cached_value
|
|
117
136
|
|
|
118
|
-
#
|
|
137
|
+
# Stampede protection: distributed lock
|
|
138
|
+
if stampede_protection and _backend_supports_locking():
|
|
139
|
+
token = await self._backend.aacquire_lock(
|
|
140
|
+
cache_key, timeout=lock_timeout, wait=lock_wait
|
|
141
|
+
)
|
|
142
|
+
|
|
143
|
+
if token is not None:
|
|
144
|
+
# We are the lock holder — rebuild the value
|
|
145
|
+
try:
|
|
146
|
+
result = await func(*args, **kwargs)
|
|
147
|
+
await self._backend.aset(
|
|
148
|
+
cache_key, result, expire=effective_expire
|
|
149
|
+
)
|
|
150
|
+
return result
|
|
151
|
+
finally:
|
|
152
|
+
await self._backend.arelease_lock(cache_key, token)
|
|
153
|
+
else:
|
|
154
|
+
# Another process is rebuilding — check if it finished
|
|
155
|
+
cached_value = await self._backend.aget(
|
|
156
|
+
cache_key, default=_CACHE_MISS
|
|
157
|
+
)
|
|
158
|
+
if cached_value is not _CACHE_MISS:
|
|
159
|
+
return cached_value
|
|
160
|
+
|
|
161
|
+
# Still no value — fallback: call function directly
|
|
162
|
+
return await func(*args, **kwargs)
|
|
163
|
+
|
|
164
|
+
# No stampede protection — original behavior
|
|
119
165
|
result = await func(*args, **kwargs)
|
|
120
166
|
await self._backend.aset(
|
|
121
|
-
cache_key, result, expire=expire or self._default_expire
|
|
167
|
+
cache_key, result, expire=effective_expire
|
|
122
168
|
)
|
|
123
169
|
return result
|
|
124
170
|
|
|
@@ -142,16 +188,46 @@ class FastAPICache:
|
|
|
142
188
|
return func(*args, **kwargs)
|
|
143
189
|
|
|
144
190
|
cache_key = build_cache_key(*args, **kwargs)
|
|
191
|
+
effective_expire = expire or self._default_expire
|
|
145
192
|
|
|
146
|
-
#
|
|
147
|
-
cached_value = self._backend.get(
|
|
148
|
-
|
|
193
|
+
# Fast path: cache hit
|
|
194
|
+
cached_value = self._backend.get(
|
|
195
|
+
cache_key, default=_CACHE_MISS
|
|
196
|
+
)
|
|
197
|
+
if cached_value is not _CACHE_MISS:
|
|
149
198
|
return cached_value
|
|
150
199
|
|
|
151
|
-
#
|
|
200
|
+
# Stampede protection: distributed lock
|
|
201
|
+
if stampede_protection and _backend_supports_locking():
|
|
202
|
+
token = self._backend.acquire_lock(
|
|
203
|
+
cache_key, timeout=lock_timeout, wait=lock_wait
|
|
204
|
+
)
|
|
205
|
+
|
|
206
|
+
if token is not None:
|
|
207
|
+
# We are the lock holder — rebuild the value
|
|
208
|
+
try:
|
|
209
|
+
result = func(*args, **kwargs)
|
|
210
|
+
self._backend.set(
|
|
211
|
+
cache_key, result, expire=effective_expire
|
|
212
|
+
)
|
|
213
|
+
return result
|
|
214
|
+
finally:
|
|
215
|
+
self._backend.release_lock(cache_key, token)
|
|
216
|
+
else:
|
|
217
|
+
# Another process is rebuilding — check if it finished
|
|
218
|
+
cached_value = self._backend.get(
|
|
219
|
+
cache_key, default=_CACHE_MISS
|
|
220
|
+
)
|
|
221
|
+
if cached_value is not _CACHE_MISS:
|
|
222
|
+
return cached_value
|
|
223
|
+
|
|
224
|
+
# Still no value — fallback: call function directly
|
|
225
|
+
return func(*args, **kwargs)
|
|
226
|
+
|
|
227
|
+
# No stampede protection — original behavior
|
|
152
228
|
result = func(*args, **kwargs)
|
|
153
229
|
self._backend.set(
|
|
154
|
-
cache_key, result, expire=expire or self._default_expire
|
|
230
|
+
cache_key, result, expire=effective_expire
|
|
155
231
|
)
|
|
156
232
|
return result
|
|
157
233
|
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: fastapi-cachekit
|
|
3
|
-
Version: 0.1.5
|
|
3
|
+
Version: 0.2.0
|
|
4
4
|
Summary: High-performance caching solution for FastAPI applications
|
|
5
5
|
Author-email: Bijay Nayak <bijay6779@gmail.com>
|
|
6
6
|
License-Expression: MIT
|
|
@@ -18,6 +18,7 @@ Classifier: Programming Language :: Python :: 3.10
|
|
|
18
18
|
Classifier: Programming Language :: Python :: 3.11
|
|
19
19
|
Classifier: Programming Language :: Python :: 3.12
|
|
20
20
|
Classifier: Programming Language :: Python :: 3.13
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
21
22
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
22
23
|
Classifier: Topic :: Internet :: WWW/HTTP
|
|
23
24
|
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "fastapi-cachekit"
|
|
3
|
-
version = "0.1.5"
|
|
3
|
+
version = "0.2.0"
|
|
4
4
|
description = "High-performance caching solution for FastAPI applications"
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
license = "MIT"
|
|
@@ -19,6 +19,7 @@ classifiers = [
|
|
|
19
19
|
"Programming Language :: Python :: 3.11",
|
|
20
20
|
"Programming Language :: Python :: 3.12",
|
|
21
21
|
"Programming Language :: Python :: 3.13",
|
|
22
|
+
"Programming Language :: Python :: 3.14",
|
|
22
23
|
"Topic :: Software Development :: Libraries :: Python Modules",
|
|
23
24
|
"Topic :: Internet :: WWW/HTTP",
|
|
24
25
|
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
|
|
@@ -92,6 +93,7 @@ build-backend = "setuptools.build_meta"
|
|
|
92
93
|
|
|
93
94
|
[dependency-groups]
|
|
94
95
|
dev = [
|
|
96
|
+
{include-group = "all-backends"},
|
|
95
97
|
"httpx>=0.28.1",
|
|
96
98
|
"mkdocs-material>=9.6.14",
|
|
97
99
|
"mkdocstrings[python]>=0.29.1",
|
|
@@ -105,3 +107,13 @@ dev = [
|
|
|
105
107
|
"twine>=6.1.0",
|
|
106
108
|
"uvicorn[standard]>=0.34.3",
|
|
107
109
|
]
|
|
110
|
+
all-backends = [
|
|
111
|
+
"redis>=4.2.0",
|
|
112
|
+
"psycopg[pool]>=3.2.9",
|
|
113
|
+
"aiomcache>=0.8.1",
|
|
114
|
+
"pymemcache>=4.0.0",
|
|
115
|
+
"pymongo[snappy,gssapi,srv]>=4.6.0",
|
|
116
|
+
"google-cloud-firestore>=2.3.0",
|
|
117
|
+
"boto3>=1.10.0",
|
|
118
|
+
"aioboto3>=6.0.0",
|
|
119
|
+
]
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{fastapi_cachekit-0.1.5 → fastapi_cachekit-0.2.0}/fastapi_cachekit.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|