cachu 0.2.3-py3-none-any.whl → 0.2.5-py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between versions exactly as they appear in their public registries.
- cachu/__init__.py +8 -12
- cachu/backends/__init__.py +65 -5
- cachu/backends/memory.py +159 -57
- cachu/backends/redis.py +160 -28
- cachu/backends/sqlite.py +326 -41
- cachu/config.py +6 -6
- cachu/decorator.py +354 -87
- cachu/keys.py +8 -0
- cachu/mutex.py +247 -0
- cachu/operations.py +171 -23
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/METADATA +7 -10
- cachu-0.2.5.dist-info/RECORD +15 -0
- cachu/async_decorator.py +0 -262
- cachu/async_operations.py +0 -178
- cachu/backends/async_base.py +0 -50
- cachu/backends/async_memory.py +0 -111
- cachu/backends/async_redis.py +0 -141
- cachu/backends/async_sqlite.py +0 -256
- cachu/backends/file.py +0 -10
- cachu-0.2.3.dist-info/RECORD +0 -21
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/WHEEL +0 -0
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/top_level.txt +0 -0
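The deleted async_* modules and the new cachu/mutex.py show 0.2.5 folding the separate async backends into unified sync/async classes. A minimal usage sketch against the SqliteBackend methods visible in this diff; the file path is illustrative, and the sync set(key, value, ttl) signature is assumed to mirror the aset signature shown below:

import asyncio
from cachu.backends.sqlite import SqliteBackend

backend = SqliteBackend('/tmp/demo-cache.db')  # illustrative path

backend.set('answer', 42, 60)          # assumed to mirror aset(key, value, ttl)
print(backend.get('answer'))           # 42, via the sync sqlite3 path

async def main() -> None:
    await backend.aset('async-answer', 43, 60)   # async path via aiosqlite
    print(await backend.aget('async-answer'))    # 43
    await backend.aclose()

asyncio.run(main())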
cachu/backends/sqlite.py
CHANGED
@@ -1,57 +1,141 @@
 """SQLite-based cache backend.
 """
-import
+import asyncio
 import pickle
 import sqlite3
 import threading
 import time
-from collections.abc import Iterator
-from typing import Any
+from collections.abc import AsyncIterator, Iterator
+from typing import TYPE_CHECKING, Any

+from ..mutex import AsyncCacheMutex, AsyncioMutex, CacheMutex, ThreadingMutex
 from . import NO_VALUE, Backend

+if TYPE_CHECKING:
+    import aiosqlite
+
+
+def _get_aiosqlite_module() -> Any:
+    """Import aiosqlite module, raising helpful error if not installed.
+    """
+    try:
+        import aiosqlite
+        return aiosqlite
+    except ImportError as e:
+        raise RuntimeError(
+            "Async SQLite support requires the 'aiosqlite' package. "
+            "Install with: pip install cachu[async]"
+        ) from e
+

 class SqliteBackend(Backend):
-    """SQLite file-based cache backend.
+    """Unified SQLite file-based cache backend with both sync and async interfaces.
     """

     def __init__(self, filepath: str) -> None:
         self._filepath = filepath
-        self.
-        self.
+        self._sync_lock = threading.RLock()
+        self._async_lock: asyncio.Lock | None = None
+        self._async_write_lock: asyncio.Lock | None = None
+        self._async_connection: aiosqlite.Connection | None = None
+        self._async_initialized = False
+        self._init_sync_db()
+
+    def _get_async_lock(self) -> asyncio.Lock:
+        """Lazy-create async init lock (must be called from async context).
+        """
+        if self._async_lock is None:
+            self._async_lock = asyncio.Lock()
+        return self._async_lock
+
+    def _get_async_write_lock(self) -> asyncio.Lock:
+        """Lazy-create async write lock (must be called from async context).
+        """
+        if self._async_write_lock is None:
+            self._async_write_lock = asyncio.Lock()
+        return self._async_write_lock

-    def
-        """Initialize database schema.
+    def _init_sync_db(self) -> None:
+        """Initialize sync database schema.
         """
-        with self.
+        with self._sync_lock:
             conn = sqlite3.connect(self._filepath)
             try:
-                conn.execute(
+                conn.execute("""
                     CREATE TABLE IF NOT EXISTS cache (
                         key TEXT PRIMARY KEY,
                         value BLOB NOT NULL,
                         created_at REAL NOT NULL,
                         expires_at REAL NOT NULL
                     )
-
-                conn.execute(
+                """)
+                conn.execute("""
                     CREATE INDEX IF NOT EXISTS idx_cache_expires
                     ON cache(expires_at)
-
+                """)
                 conn.commit()
             finally:
                 conn.close()

-    def
-        """
+    async def _ensure_async_initialized(self) -> 'aiosqlite.Connection':
+        """Ensure async database is initialized and return connection.
+        """
+        async with self._get_async_lock():
+            if self._async_connection is None:
+                aiosqlite = _get_aiosqlite_module()
+                self._async_connection = await aiosqlite.connect(self._filepath)
+                await self._async_connection.execute('PRAGMA journal_mode=WAL')
+                await self._async_connection.execute('PRAGMA busy_timeout=5000')
+
+            if not self._async_initialized:
+                await self._async_connection.execute("""
+                    CREATE TABLE IF NOT EXISTS cache (
+                        key TEXT PRIMARY KEY,
+                        value BLOB NOT NULL,
+                        created_at REAL NOT NULL,
+                        expires_at REAL NOT NULL
+                    )
+                """)
+                await self._async_connection.execute("""
+                    CREATE INDEX IF NOT EXISTS idx_cache_expires
+                    ON cache(expires_at)
+                """)
+                await self._async_connection.commit()
+                self._async_initialized = True
+
+            return self._async_connection
+
+    def _get_sync_connection(self) -> sqlite3.Connection:
+        """Get a sync database connection.
         """
         return sqlite3.connect(self._filepath)

+    def _fnmatch_to_glob(self, pattern: str) -> str:
+        """Convert fnmatch pattern to SQLite GLOB pattern.
+        """
+        return pattern
+
+    def _schedule_async_delete(self, key: str) -> None:
+        """Schedule a background deletion task (fire-and-forget).
+        """
+        async def _delete() -> None:
+            try:
+                async with self._get_async_write_lock():
+                    conn = await self._ensure_async_initialized()
+                    await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
+                    await conn.commit()
+            except Exception:
+                pass
+
+        asyncio.create_task(_delete())
+
+    # ===== Sync interface =====
+
     def get(self, key: str) -> Any:
         """Get value by key. Returns NO_VALUE if not found or expired.
         """
-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 cursor = conn.execute(
                     'SELECT value, expires_at FROM cache WHERE key = ?',
@@ -77,8 +161,8 @@ class SqliteBackend(Backend):
     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
         """
-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 cursor = conn.execute(
                     'SELECT value, created_at, expires_at FROM cache WHERE key = ?',
@@ -107,12 +191,12 @@ class SqliteBackend(Backend):
         now = time.time()
         value_blob = pickle.dumps(value)

-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 conn.execute(
-
-                    VALUES (?, ?, ?, ?)
+                    """INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)
+                    VALUES (?, ?, ?, ?)""",
                     (key, value_blob, now, now + ttl),
                 )
                 conn.commit()
@@ -122,8 +206,8 @@ class SqliteBackend(Backend):
     def delete(self, key: str) -> None:
         """Delete value by key.
         """
-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 conn.execute('DELETE FROM cache WHERE key = ?', (key,))
                 conn.commit()
@@ -135,8 +219,8 @@ class SqliteBackend(Backend):
     def clear(self, pattern: str | None = None) -> int:
         """Clear entries matching pattern. Returns count of cleared entries.
         """
-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 if pattern is None:
                     cursor = conn.execute('SELECT COUNT(*) FROM cache')
@@ -164,8 +248,8 @@ class SqliteBackend(Backend):
         """
         now = time.time()

-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 if pattern is None:
                     cursor = conn.execute(
@@ -183,16 +267,15 @@ class SqliteBackend(Backend):
             finally:
                 conn.close()

-
-            yield key
+        yield from all_keys

     def count(self, pattern: str | None = None) -> int:
         """Count keys matching pattern.
         """
         now = time.time()

-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 if pattern is None:
                     cursor = conn.execute(
@@ -212,21 +295,18 @@ class SqliteBackend(Backend):
             finally:
                 conn.close()

-    def
-        """
-
-        fnmatch uses * and ? which are the same as SQLite GLOB.
-        The main difference is character classes [...] which we don't use.
+    def get_mutex(self, key: str) -> CacheMutex:
+        """Get a mutex for dogpile prevention on the given key.
         """
-        return
+        return ThreadingMutex(f'sqlite:{self._filepath}:{key}')

     def cleanup_expired(self) -> int:
         """Remove expired entries. Returns count of removed entries.
         """
         now = time.time()

-        with self.
-            conn = self.
+        with self._sync_lock:
+            conn = self._get_sync_connection()
             try:
                 cursor = conn.execute(
                     'SELECT COUNT(*) FROM cache WHERE expires_at <= ?',
@@ -238,3 +318,208 @@ class SqliteBackend(Backend):
                 return count
             finally:
                 conn.close()
+
+    # ===== Async interface =====
+
+    async def aget(self, key: str) -> Any:
+        """Async get value by key. Returns NO_VALUE if not found or expired.
+        """
+        try:
+            conn = await self._ensure_async_initialized()
+            cursor = await conn.execute(
+                'SELECT value, expires_at FROM cache WHERE key = ?',
+                (key,),
+            )
+            row = await cursor.fetchone()
+
+            if row is None:
+                return NO_VALUE
+
+            value_blob, expires_at = row
+            if time.time() > expires_at:
+                self._schedule_async_delete(key)
+                return NO_VALUE
+
+            return pickle.loads(value_blob)
+        except Exception:
+            return NO_VALUE
+
+    async def aget_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Async get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        try:
+            conn = await self._ensure_async_initialized()
+            cursor = await conn.execute(
+                'SELECT value, created_at, expires_at FROM cache WHERE key = ?',
+                (key,),
+            )
+            row = await cursor.fetchone()
+
+            if row is None:
+                return NO_VALUE, None
+
+            value_blob, created_at, expires_at = row
+            if time.time() > expires_at:
+                self._schedule_async_delete(key)
+                return NO_VALUE, None
+
+            return pickle.loads(value_blob), created_at
+        except Exception:
+            return NO_VALUE, None
+
+    async def aset(self, key: str, value: Any, ttl: int) -> None:
+        """Async set value with TTL in seconds.
+        """
+        now = time.time()
+        value_blob = pickle.dumps(value)
+
+        async with self._get_async_write_lock():
+            conn = await self._ensure_async_initialized()
+            await conn.execute(
+                """INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)
+                VALUES (?, ?, ?, ?)""",
+                (key, value_blob, now, now + ttl),
+            )
+            await conn.commit()
+
+    async def adelete(self, key: str) -> None:
+        """Async delete value by key.
+        """
+        async with self._get_async_write_lock():
+            try:
+                conn = await self._ensure_async_initialized()
+                await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
+                await conn.commit()
+            except Exception:
+                pass
+
+    async def aclear(self, pattern: str | None = None) -> int:
+        """Async clear entries matching pattern. Returns count of cleared entries.
+        """
+        async with self._get_async_write_lock():
+            try:
+                conn = await self._ensure_async_initialized()
+                if pattern is None:
+                    cursor = await conn.execute('SELECT COUNT(*) FROM cache')
+                    row = await cursor.fetchone()
+                    count = row[0]
+                    await conn.execute('DELETE FROM cache')
+                    await conn.commit()
+                    return count
+
+                glob_pattern = self._fnmatch_to_glob(pattern)
+                cursor = await conn.execute(
+                    'SELECT COUNT(*) FROM cache WHERE key GLOB ?',
+                    (glob_pattern,),
+                )
+                row = await cursor.fetchone()
+                count = row[0]
+                await conn.execute('DELETE FROM cache WHERE key GLOB ?', (glob_pattern,))
+                await conn.commit()
+                return count
+            except Exception:
+                return 0
+
+    async def akeys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Async iterate over keys matching pattern.
+        """
+        now = time.time()
+        conn = await self._ensure_async_initialized()
+
+        if pattern is None:
+            cursor = await conn.execute(
+                'SELECT key FROM cache WHERE expires_at > ?',
+                (now,),
+            )
+        else:
+            glob_pattern = self._fnmatch_to_glob(pattern)
+            cursor = await conn.execute(
+                'SELECT key FROM cache WHERE key GLOB ? AND expires_at > ?',
+                (glob_pattern, now),
+            )
+
+        all_keys = [row[0] for row in await cursor.fetchall()]
+
+        for key in all_keys:
+            yield key
+
+    async def acount(self, pattern: str | None = None) -> int:
+        """Async count keys matching pattern.
+        """
+        now = time.time()
+
+        try:
+            conn = await self._ensure_async_initialized()
+            if pattern is None:
+                cursor = await conn.execute(
+                    'SELECT COUNT(*) FROM cache WHERE expires_at > ?',
+                    (now,),
+                )
+            else:
+                glob_pattern = self._fnmatch_to_glob(pattern)
+                cursor = await conn.execute(
+                    'SELECT COUNT(*) FROM cache WHERE key GLOB ? AND expires_at > ?',
+                    (glob_pattern, now),
+                )
+
+            row = await cursor.fetchone()
+            return row[0]
+        except Exception:
+            return 0
+
+    def get_async_mutex(self, key: str) -> AsyncCacheMutex:
+        """Get an async mutex for dogpile prevention on the given key.
+        """
+        return AsyncioMutex(f'sqlite:{self._filepath}:{key}')
+
+    async def acleanup_expired(self) -> int:
+        """Async remove expired entries. Returns count of removed entries.
+        """
+        now = time.time()
+
+        async with self._get_async_write_lock():
+            conn = await self._ensure_async_initialized()
+            cursor = await conn.execute(
+                'SELECT COUNT(*) FROM cache WHERE expires_at <= ?',
+                (now,),
+            )
+            row = await cursor.fetchone()
+            count = row[0]
+            await conn.execute('DELETE FROM cache WHERE expires_at <= ?', (now,))
+            await conn.commit()
+            return count
+
+    # ===== Lifecycle =====
+
+    def _close_async_connection_sync(self) -> None:
+        """Forcefully close async connection from sync context.
+
+        This accesses aiosqlite internals as there's no public sync close API.
+        """
+        if self._async_connection is None:
+            return
+
+        conn = self._async_connection
+        self._async_connection = None
+        self._async_initialized = False
+
+        try:
+            conn._running = False
+            if hasattr(conn, '_connection') and conn._connection:
+                conn._connection.close()
+        except Exception:
+            pass
+
+    def close(self) -> None:
+        """Close all backend resources from sync context.
+        """
+        self._close_async_connection_sync()
+
+    async def aclose(self) -> None:
+        """Close all backend resources from async context.
+        """
+        if self._async_connection is not None:
+            conn = self._async_connection
+            self._async_connection = None
+            self._async_initialized = False
+            await conn.close()
cachu/config.py
CHANGED
@@ -58,7 +58,7 @@ class CacheConfig:
     key_prefix: str = ''
     file_dir: str = '/tmp'
     redis_url: str = 'redis://localhost:6379/0'
-
+    lock_timeout: float = 10.0


 class ConfigRegistry:
@@ -80,7 +80,7 @@ class ConfigRegistry:
         key_prefix: str | None = None,
         file_dir: str | None = None,
         redis_url: str | None = None,
-
+        lock_timeout: float | None = None,
     ) -> CacheConfig:
         """Configure cache for a specific package.
         """
@@ -92,7 +92,7 @@ class ConfigRegistry:
             'key_prefix': key_prefix,
             'file_dir': str(file_dir) if file_dir else None,
             'redis_url': redis_url,
-            '
+            'lock_timeout': lock_timeout,
         }
         updates = {k: v for k, v in updates.items() if v is not None}

@@ -155,7 +155,7 @@ def configure(
     key_prefix: str | None = None,
     file_dir: str | None = None,
    redis_url: str | None = None,
-
+    lock_timeout: float | None = None,
 ) -> CacheConfig:
     """Configure cache settings for the caller's package.

@@ -167,14 +167,14 @@ def configure(
         key_prefix: Prefix for all cache keys (for versioning/debugging)
         file_dir: Directory for file-based caches
         redis_url: Redis connection URL (e.g., 'redis://localhost:6379/0')
-
+        lock_timeout: Timeout for distributed locks in seconds (default: 10.0)
     """
     return _registry.configure(
         backend=backend,
         key_prefix=key_prefix,
         file_dir=str(file_dir) if file_dir else None,
         redis_url=redis_url,
-
+        lock_timeout=lock_timeout,
    )

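The new lock_timeout option threads through CacheConfig, ConfigRegistry.configure(), and the module-level configure(). A hedged usage sketch; the parameter names come from this diff, while the 'sqlite' backend identifier and the directory are assumptions:

from cachu.config import configure

configure(
    backend='sqlite',          # assumed backend identifier
    file_dir='/tmp/my-cache',  # illustrative directory
    lock_timeout=5.0,          # per the docstring: lock timeout in seconds
)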