fastapi-cachekit 0.1.2__tar.gz → 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21) hide show
  1. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/PKG-INFO +33 -9
  2. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/README.md +24 -8
  3. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/__init__.py +14 -1
  4. fastapi_cachekit-0.1.4/fast_cache/backends/dynamodb.py +495 -0
  5. fastapi_cachekit-0.1.4/fast_cache/backends/google_firestore.py +351 -0
  6. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/memcached.py +18 -8
  7. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/memory.py +56 -29
  8. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/mongodb.py +30 -30
  9. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/postgres.py +70 -39
  10. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/integration.py +16 -3
  11. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fastapi_cachekit.egg-info/PKG-INFO +33 -9
  12. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fastapi_cachekit.egg-info/SOURCES.txt +2 -0
  13. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fastapi_cachekit.egg-info/requires.txt +10 -0
  14. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/pyproject.toml +12 -2
  15. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/LICENSE.md +0 -0
  16. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/__init__.py +0 -0
  17. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/backend.py +0 -0
  18. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fast_cache/backends/redis.py +0 -0
  19. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fastapi_cachekit.egg-info/dependency_links.txt +0 -0
  20. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/fastapi_cachekit.egg-info/top_level.txt +0 -0
  21. {fastapi_cachekit-0.1.2 → fastapi_cachekit-0.1.4}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: fastapi-cachekit
3
- Version: 0.1.2
3
+ Version: 0.1.4
4
4
  Summary: High-performance caching solution for FastAPI applications
5
5
  Author-email: Bijay Nayak <bijay6779@gmail.com>
6
6
  License-Expression: MIT
@@ -37,12 +37,20 @@ Requires-Dist: aiomcache>=0.8.1; extra == "memcached"
37
37
  Requires-Dist: pymemcache>=4.0.0; extra == "memcached"
38
38
  Provides-Extra: mongodb
39
39
  Requires-Dist: pymongo[gssapi,snappy,srv]>=4.6.0; extra == "mongodb"
40
+ Provides-Extra: firestore
41
+ Requires-Dist: google-cloud-firestore>=2.3.0; extra == "firestore"
42
+ Provides-Extra: dynamodb
43
+ Requires-Dist: boto3>=1.10.0; extra == "dynamodb"
44
+ Requires-Dist: aioboto3>=6.0.0; extra == "dynamodb"
40
45
  Provides-Extra: all
41
46
  Requires-Dist: redis>=4.2.0; extra == "all"
42
47
  Requires-Dist: psycopg[pool]>=3.2.9; extra == "all"
43
48
  Requires-Dist: aiomcache>=0.8.1; extra == "all"
44
49
  Requires-Dist: pymemcache>=4.0.0; extra == "all"
45
50
  Requires-Dist: pymongo[gssapi,snappy,srv]>=4.6.0; extra == "all"
51
+ Requires-Dist: google-cloud-firestore>=2.3.0; extra == "all"
52
+ Requires-Dist: boto3>=1.10.0; extra == "all"
53
+ Requires-Dist: aioboto3>=6.0.0; extra == "all"
46
54
  Dynamic: license-file
47
55
 
48
56
  # fastapi-cachekit
@@ -58,7 +66,7 @@ A high-performance, flexible caching solution for FastAPI applications. fastapi-
58
66
  ## Features
59
67
 
60
68
  - ✅ Full async/sync support for all operations
61
- - ✅ Redis backend with connection pooling
69
+ - ✅ Multiple backend support, so you can use the same tech stack as your app
62
70
  - ✅ Function result caching with decorator syntax
63
71
  - ✅ FastAPI dependency injection support
64
72
  - ✅ Namespace support for isolating cache entries
@@ -67,13 +75,15 @@ A high-performance, flexible caching solution for FastAPI applications. fastapi-
67
75
  - ✅ Expiration time support (seconds or timedelta)
68
76
 
69
77
  ## 📦 Backends & Sync/Async Support
70
-
71
- | Backend | Sync API | Async API | Install Extra |
72
- |--------------------|:--------:|:---------:|----------------------|
73
- | `InMemoryBackend` | ✅ | ✅ | _built-in_ |
74
- | `RedisBackend` | ✅ | ✅ | `redis` |
75
- | `PostgresBackend` | ✅ | ✅ | `postgres` |
76
- | `MemcachedBackend` | ✅ | ✅ | `memcached` |
78
+ | Backend | Sync API | Async API | Install Extra |
79
+ |--------------------|:--------:|:---------:|---------------|
80
+ | `InMemoryBackend` | ✅ | ✅ | _built-in_ |
81
+ | `RedisBackend` | ✅ | ✅ | `redis` |
82
+ | `PostgresBackend` | ✅ | ✅ | `postgres` |
83
+ | `MemcachedBackend` | ✅ | ✅ | `memcached` |
84
+ | `MongoDB` | ✅ | ✅ | `mongodb` |
85
+ | `FireStore` | ✅ | ✅ | `firestore` |
86
+ | `DynamoDBBackend` | ✅ | ✅ | `dynamodb` |
77
87
 
78
88
  ---
79
89
 
@@ -98,6 +108,20 @@ pip install fastapi-cachekit[postgres]
98
108
  ```bash
99
109
  pip install fastapi-cachekit[memcached]
100
110
  ```
111
+ **With MongoDB:**
112
+ ```bash
113
+ pip install fastapi-cachekit[mongodb]
114
+ ```
115
+
116
+ **With FireStore:**
117
+ ```bash
118
+ pip install fastapi-cachekit[firestore]
119
+ ```
120
+
121
+ **With DynamoDB:**
122
+ ```bash
123
+ pip install fastapi-cachekit[dynamodb]
124
+ ```
101
125
 
102
126
  **All backends:**
103
127
  ```bash
@@ -11,7 +11,7 @@ A high-performance, flexible caching solution for FastAPI applications. fastapi-
11
11
  ## Features
12
12
 
13
13
  - ✅ Full async/sync support for all operations
14
- - ✅ Redis backend with connection pooling
14
+ - ✅ Multiple backend support, so you can use the same tech stack as your app
15
15
  - ✅ Function result caching with decorator syntax
16
16
  - ✅ FastAPI dependency injection support
17
17
  - ✅ Namespace support for isolating cache entries
@@ -20,13 +20,15 @@ A high-performance, flexible caching solution for FastAPI applications. fastapi-
20
20
  - ✅ Expiration time support (seconds or timedelta)
21
21
 
22
22
  ## 📦 Backends & Sync/Async Support
23
-
24
- | Backend | Sync API | Async API | Install Extra |
25
- |--------------------|:--------:|:---------:|----------------------|
26
- | `InMemoryBackend` | ✅ | ✅ | _built-in_ |
27
- | `RedisBackend` | ✅ | ✅ | `redis` |
28
- | `PostgresBackend` | ✅ | ✅ | `postgres` |
29
- | `MemcachedBackend` | ✅ | ✅ | `memcached` |
23
+ | Backend | Sync API | Async API | Install Extra |
24
+ |--------------------|:--------:|:---------:|---------------|
25
+ | `InMemoryBackend` | ✅ | ✅ | _built-in_ |
26
+ | `RedisBackend` | ✅ | ✅ | `redis` |
27
+ | `PostgresBackend` | ✅ | ✅ | `postgres` |
28
+ | `MemcachedBackend` | ✅ | ✅ | `memcached` |
29
+ | `MongoDB` | ✅ | ✅ | `mongodb` |
30
+ | `FireStore` | ✅ | ✅ | `firestore` |
31
+ | `DynamoDBBackend` | ✅ | ✅ | `dynamodb` |
30
32
 
31
33
  ---
32
34
 
@@ -51,6 +53,20 @@ pip install fastapi-cachekit[postgres]
51
53
  ```bash
52
54
  pip install fastapi-cachekit[memcached]
53
55
  ```
56
+ **With MongoDB:**
57
+ ```bash
58
+ pip install fastapi-cachekit[mongodb]
59
+ ```
60
+
61
+ **With FireStore:**
62
+ ```bash
63
+ pip install fastapi-cachekit[firestore]
64
+ ```
65
+
66
+ **With DynamoDB:**
67
+ ```bash
68
+ pip install fastapi-cachekit[dynamodb]
69
+ ```
54
70
 
55
71
  **All backends:**
56
72
  ```bash
@@ -6,8 +6,21 @@ from .backends.memory import InMemoryBackend
6
6
  from .backends.postgres import PostgresBackend
7
7
  from .backends.memcached import MemcachedBackend
8
8
  from .backends.mongodb import MongoDBBackend
9
+ from .backends.google_firestore import FirestoreBackend
10
+ from .backends.dynamodb import DynamoDBBackend
9
11
 
10
- __all__ = ["FastAPICache", "RedisBackend", "CacheBackend", "InMemoryBackend","PostgresBackend", "cache","MemcachedBackend", "MongoDBBackend" ]
12
+ __all__ = [
13
+ "FastAPICache",
14
+ "RedisBackend",
15
+ "CacheBackend",
16
+ "InMemoryBackend",
17
+ "PostgresBackend",
18
+ "cache",
19
+ "MemcachedBackend",
20
+ "MongoDBBackend",
21
+ "FirestoreBackend",
22
+ "DynamoDBBackend"
23
+ ]
11
24
 
12
25
 
13
26
  # Create global cache instance
@@ -0,0 +1,495 @@
1
+ import hashlib
2
+ from typing import Any, Optional, Union
3
+ from datetime import timedelta
4
+ import pickle
5
+ import time
6
+
7
+ from .backend import CacheBackend
8
+
9
+
10
+ class DynamoDBBackend(CacheBackend):
11
+ """
12
+ DynamoDB cache backend implementation with namespace support.
13
+
14
+ Attributes:
15
+ _namespace (str): Namespace prefix for all keys.
16
+ _table_name (str): DynamoDB table name.
17
+ _sync_client (boto3.client): Synchronous DynamoDB client.
18
+ _async_client (aioboto3.client): Asynchronous DynamoDB client.
19
+ _sync_resource (boto3.resource): Synchronous DynamoDB resource.
20
+ _async_resource (aioboto3.resource): Asynchronous DynamoDB resource.
21
+ """
22
+
23
+ def __init__(
24
+ self,
25
+ table_name: str,
26
+ region_name: str,
27
+ namespace: str = "cache",
28
+ aws_access_key_id: Optional[str] = None,
29
+ aws_secret_access_key: Optional[str] = None,
30
+ endpoint_url: Optional[str] = None,
31
+ create_table: bool = True,
32
+ ) -> None:
33
+ """
34
+ Initialize DynamoDB backend with table and connection settings.
35
+
36
+ Args:
37
+ table_name (str): DynamoDB table name for cache storage.
38
+ namespace (str): Namespace prefix for all keys (default: "cache").
39
+ region_name (str): AWS region name (required; no default).
40
+ aws_access_key_id (Optional[str]): AWS access key ID.
41
+ aws_secret_access_key (Optional[str]): AWS secret access key.
42
+ endpoint_url (Optional[str]): Custom endpoint URL (for local DynamoDB).
43
+ create_table (bool): Whether to create table if it doesn't exist.
44
+ """
45
+ try:
46
+ import boto3
47
+ import aioboto3
48
+ except ImportError:
49
+ raise ImportError(
50
+ "DynamoDBBackend requires the 'boto3' and 'aioboto3' packages. "
51
+ "Install them with: pip install fastapi-cachekit[dynamodb]"
52
+ )
53
+
54
+ self._namespace = namespace
55
+ self._table_name = table_name
56
+
57
+ # Connection parameters
58
+ self._connection_params = {
59
+ "region_name": region_name,
60
+ "endpoint_url": endpoint_url,
61
+ }
62
+
63
+ if aws_access_key_id and aws_secret_access_key:
64
+ self._connection_params.update(
65
+ {
66
+ "aws_access_key_id": aws_access_key_id,
67
+ "aws_secret_access_key": aws_secret_access_key,
68
+ }
69
+ )
70
+
71
+ # Sync client for table management only
72
+ self._sync_client = boto3.client("dynamodb", **self._connection_params)
73
+
74
+ # Sync resource/table for sync cache operations
75
+ self._sync_resource = boto3.resource("dynamodb", **self._connection_params)
76
+ self._sync_table = self._sync_resource.Table(table_name)
77
+
78
+ # Initialize async session
79
+ self._async_resource = None
80
+ self._async_table = None
81
+ self._async_session = aioboto3.Session()
82
+
83
+ # Create table if requested
84
+ if create_table:
85
+ self._ensure_table_exists()
86
+
87
+ async def _get_async_table(self):
88
+ if self._async_table is None:
89
+ # Create the resource context
90
+ self._async_resource = self._async_session.resource(
91
+ "dynamodb", **self._connection_params
92
+ )
93
+
94
+ # Enter the context and get the actual resource
95
+ actual_resource = await self._async_resource.__aenter__()
96
+
97
+ # Create the table from the actual resource
98
+ self._async_table = await actual_resource.Table(self._table_name)
99
+
100
+
101
+ return self._async_table
102
+
103
+ def _ensure_table_exists(self) -> None:
104
+ """
105
+ Ensure the DynamoDB table exists, create if it doesn't.
106
+ """
107
+
108
+ try:
109
+ self._sync_client.describe_table(TableName=self._table_name)
110
+ except self._sync_client.exceptions.ResourceNotFoundException:
111
+ # Table doesn't exist, create it
112
+ self._sync_client.create_table(
113
+ TableName=self._table_name,
114
+ KeySchema=[{"AttributeName": "cache_key", "KeyType": "HASH"}],
115
+ AttributeDefinitions=[
116
+ {"AttributeName": "cache_key", "AttributeType": "S"}
117
+ ],
118
+ BillingMode="PAY_PER_REQUEST",
119
+ )
120
+
121
+ # Wait for table to be created
122
+ waiter = self._sync_client.get_waiter("table_exists")
123
+ waiter.wait(TableName=self._table_name)
124
+
125
+ try:
126
+ self._sync_client.update_time_to_live(
127
+ TableName=self._table_name,
128
+ TimeToLiveSpecification={"Enabled": True, "AttributeName": "ttl"},
129
+ )
130
+ except Exception:
131
+ pass
132
+
133
+ def _make_key(self, key: str) -> str:
134
+ """
135
+ Create a namespaced key with optional hashing for long keys.
136
+
137
+ Args:
138
+ key (str): The original key.
139
+
140
+ Returns:
141
+ str: The namespaced key, hashed if too long.
142
+ """
143
+ namespaced_key = f"{self._namespace}:{key}"
144
+
145
+ # DynamoDB caps partition keys at 2048 bytes; hash conservatively above 1024 bytes
146
+ if len(namespaced_key.encode("utf-8")) > 1024:
147
+ key_hash = hashlib.sha256(key.encode("utf-8")).hexdigest()
148
+ namespaced_key = f"{self._namespace}:hash:{key_hash}"
149
+
150
+ return namespaced_key
151
+
152
+ def _get_ttl(self, expire: Optional[Union[int, timedelta]]) -> Optional[int]:
153
+ """
154
+ Calculate TTL timestamp for DynamoDB.
155
+
156
+ Args:
157
+ expire (Optional[Union[int, timedelta]]): Expiration time.
158
+
159
+ Returns:
160
+ Optional[int]: TTL timestamp or None if no expiration.
161
+ """
162
+ if expire is None:
163
+ return None
164
+
165
+ if isinstance(expire, timedelta):
166
+ expire = int(expire.total_seconds())
167
+
168
+ if expire <= 0:
169
+ return None
170
+
171
+ return int(time.time()) + expire
172
+
173
+ def _is_expired(self, item: dict) -> bool:
174
+ """
175
+ Check if an item has expired based on TTL.
176
+
177
+ Args:
178
+ item (dict): DynamoDB item.
179
+
180
+ Returns:
181
+ bool: True if expired, False otherwise.
182
+ """
183
+ if "ttl" not in item:
184
+ return False
185
+
186
+ return time.time() > item["ttl"]
187
+
188
+ def _serialize_value(self, value: Any) -> bytes:
189
+ """
190
+ Serialize value for storage in DynamoDB.
191
+
192
+ Args:
193
+ value (Any): Value to serialize.
194
+
195
+ Returns:
196
+ bytes: Serialized value.
197
+ """
198
+ return pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
199
+
200
+ def _deserialize_value(self, data: bytes) -> Any:
201
+ """
202
+ Deserialize value from DynamoDB storage.
203
+
204
+ Args:
205
+ data (bytes): Serialized data.
206
+
207
+ Returns:
208
+ Any: Deserialized value.
209
+ """
210
+ return pickle.loads(bytes(data))
211
+
212
+ def _build_item(
213
+ self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
214
+ ) -> dict:
215
+ """
216
+ Build a DynamoDB item for storage.
217
+
218
+ Args:
219
+ key (str): Cache key.
220
+ value (Any): Value to store.
221
+ expire (Optional[Union[int, timedelta]]): Expiration time.
222
+
223
+ Returns:
224
+ dict: DynamoDB item.
225
+ """
226
+ item = {
227
+ "cache_key": self._make_key(key),
228
+ "value": self._serialize_value(value),
229
+ }
230
+
231
+ ttl = self._get_ttl(expire)
232
+ if ttl is not None:
233
+ item["ttl"] = ttl
234
+
235
+ return item
236
+
237
+ def get(self, key: str) -> Optional[Any]:
238
+ """
239
+ Synchronously retrieve a value from the cache.
240
+
241
+ Args:
242
+ key (str): The key to retrieve.
243
+
244
+ Returns:
245
+ Optional[Any]: The cached value, or None if not found.
246
+ """
247
+ try:
248
+ response = self._sync_table.get_item(Key={"cache_key": self._make_key(key)})
249
+
250
+ if "Item" not in response:
251
+ return None
252
+
253
+ item = response["Item"]
254
+
255
+ # Check if item has expired and delete if so
256
+ if self._is_expired(item):
257
+ self.delete(key)
258
+ return None
259
+ value = self._deserialize_value(item["value"])
260
+ return value
261
+ except Exception:
262
+ return None
263
+
264
+ async def aget(self, key: str) -> Optional[Any]:
265
+ """
266
+ Asynchronously retrieve a value from the cache.
267
+
268
+ Args:
269
+ key (str): The key to retrieve.
270
+
271
+ Returns:
272
+ Optional[Any]: The cached value, or None if not found.
273
+ """
274
+ try:
275
+ table = await self._get_async_table()
276
+ response = await table.get_item(Key={"cache_key": self._make_key(key)})
277
+
278
+ if "Item" not in response:
279
+ return None
280
+
281
+ item = response["Item"]
282
+
283
+ # Check if item has expired and delete if so
284
+ if self._is_expired(item):
285
+ await self.adelete(key)
286
+ return None
287
+
288
+ return self._deserialize_value(item["value"])
289
+ except Exception:
290
+ return None
291
+
292
+ def set(
293
+ self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
294
+ ) -> None:
295
+ """
296
+ Synchronously set a value in the cache.
297
+
298
+ Args:
299
+ key (str): The key under which to store the value.
300
+ value (Any): The value to store.
301
+ expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as timedelta.
302
+ """
303
+ try:
304
+ item = self._build_item(key, value, expire)
305
+ self._sync_table.put_item(Item=item)
306
+ except Exception:
307
+ pass
308
+
309
+ async def aset(
310
+ self, key: str, value: Any, expire: Optional[Union[int, timedelta]] = None
311
+ ) -> None:
312
+ """
313
+ Asynchronously set a value in the cache.
314
+
315
+ Args:
316
+ key (str): The key under which to store the value.
317
+ value (Any): The value to store.
318
+ expire (Optional[Union[int, timedelta]]): Expiration time in seconds or as timedelta.
319
+ """
320
+ try:
321
+ table = await self._get_async_table()
322
+ item = self._build_item(key, value, expire)
323
+ await table.put_item(Item=item)
324
+ except Exception:
325
+ pass
326
+
327
+ def delete(self, key: str) -> None:
328
+ """
329
+ Synchronously delete a value from the cache.
330
+
331
+ Args:
332
+ key (str): The key to delete.
333
+ """
334
+ try:
335
+ self._sync_table.delete_item(Key={"cache_key": self._make_key(key)})
336
+ except Exception:
337
+ pass
338
+
339
+ async def adelete(self, key: str) -> None:
340
+ """
341
+ Asynchronously delete a value from the cache.
342
+
343
+ Args:
344
+ key (str): The key to delete.
345
+ """
346
+ try:
347
+ table = await self._get_async_table()
348
+ await table.delete_item(Key={"cache_key": self._make_key(key)})
349
+ except Exception:
350
+ pass
351
+
352
+ def has(self, key: str) -> bool:
353
+ """
354
+ Synchronously check if a key exists in the cache.
355
+
356
+ Args:
357
+ key (str): The key to check.
358
+
359
+ Returns:
360
+ bool: True if the key exists, False otherwise.
361
+ """
362
+ try:
363
+ response = self._sync_table.get_item(
364
+ Key={"cache_key": self._make_key(key)},
365
+ ProjectionExpression="cache_key, #ttl",
366
+ ExpressionAttributeNames={"#ttl": "ttl"},
367
+ )
368
+
369
+
370
+ if "Item" not in response:
371
+ return False
372
+
373
+ item = response["Item"]
374
+
375
+ # Check if item has expired and delete if so
376
+ if self._is_expired(item):
377
+ self.delete(key)
378
+ return False
379
+
380
+ return True
381
+ except Exception:
382
+ return False
383
+
384
+ async def ahas(self, key: str) -> bool:
385
+ """
386
+ Asynchronously check if a key exists in the cache.
387
+
388
+ Args:
389
+ key (str): The key to check.
390
+
391
+ Returns:
392
+ bool: True if the key exists, False otherwise.
393
+ """
394
+ try:
395
+ table = await self._get_async_table()
396
+ response = await table.get_item(
397
+ Key={"cache_key": self._make_key(key)},
398
+ ProjectionExpression="cache_key, #ttl",
399
+ ExpressionAttributeNames={"#ttl": "ttl"},
400
+ )
401
+
402
+ if "Item" not in response:
403
+ return False
404
+
405
+ item = response["Item"]
406
+
407
+ # Check if item has expired and delete if so
408
+ if self._is_expired(item):
409
+ await self.adelete(key)
410
+ return False
411
+
412
+ return True
413
+ except Exception:
414
+ return False
415
+
416
+ def clear(self) -> None:
417
+ """
418
+ Synchronously clear all values from the namespace.
419
+ """
420
+ try:
421
+ # Scan for all items with the namespace prefix
422
+ response = self._sync_table.scan(
423
+ FilterExpression="begins_with(cache_key, :prefix)",
424
+ ExpressionAttributeValues={":prefix": f"{self._namespace}:"},
425
+ ProjectionExpression="cache_key",
426
+ )
427
+
428
+ # Delete items in batches
429
+ if response.get("Items"):
430
+ with self._sync_table.batch_writer() as batch:
431
+ for item in response["Items"]:
432
+ batch.delete_item(Key={"cache_key": item["cache_key"]})
433
+
434
+ # Handle pagination
435
+ while "LastEvaluatedKey" in response:
436
+ response = self._sync_table.scan(
437
+ FilterExpression="begins_with(cache_key, :prefix)",
438
+ ExpressionAttributeValues={":prefix": f"{self._namespace}:"},
439
+ ProjectionExpression="cache_key",
440
+ ExclusiveStartKey=response["LastEvaluatedKey"],
441
+ )
442
+
443
+ if response.get("Items"):
444
+ with self._sync_table.batch_writer() as batch:
445
+ for item in response["Items"]:
446
+ batch.delete_item(Key={"cache_key": item["cache_key"]})
447
+
448
+ except Exception:
449
+ pass
450
+
451
+ async def aclear(self) -> None:
452
+ """
453
+ Asynchronously clear all values from the namespace.
454
+ """
455
+ try:
456
+ table = await self._get_async_table()
457
+
458
+ # Scan for all items with the namespace prefix
459
+ response = await table.scan(
460
+ FilterExpression="begins_with(cache_key, :prefix)",
461
+ ExpressionAttributeValues={":prefix": f"{self._namespace}:"},
462
+ ProjectionExpression="cache_key",
463
+ )
464
+
465
+ # Delete items in batches
466
+ if response.get("Items"):
467
+ async with table.batch_writer() as batch:
468
+ for item in response["Items"]:
469
+ await batch.delete_item(Key={"cache_key": item["cache_key"]})
470
+
471
+ # Handle pagination
472
+ while "LastEvaluatedKey" in response:
473
+ response = await table.scan(
474
+ FilterExpression="begins_with(cache_key, :prefix)",
475
+ ExpressionAttributeValues={":prefix": f"{self._namespace}:"},
476
+ ProjectionExpression="cache_key",
477
+ ExclusiveStartKey=response["LastEvaluatedKey"],
478
+ )
479
+
480
+ if response.get("Items"):
481
+ async with table.batch_writer() as batch:
482
+ for item in response["Items"]:
483
+ await batch.delete_item(Key={"cache_key": item["cache_key"]})
484
+
485
+ except Exception:
486
+ pass
487
+
488
+ async def close(self) -> None:
489
+ """
490
+ Close DynamoDB connections and clean up resources.
491
+ """
492
+ if self._async_resource:
493
+ await self._async_resource.__aexit__(None, None, None)
494
+ self._async_resource = None
495
+ self._async_table = None