cachu 0.2.3__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachu/__init__.py +6 -9
- cachu/backends/__init__.py +45 -2
- cachu/backends/memory.py +102 -2
- cachu/backends/redis.py +132 -21
- cachu/backends/sqlite.py +257 -12
- cachu/config.py +0 -6
- cachu/decorator.py +353 -68
- cachu/keys.py +8 -0
- cachu/operations.py +172 -23
- {cachu-0.2.3.dist-info → cachu-0.2.4.dist-info}/METADATA +7 -9
- cachu-0.2.4.dist-info/RECORD +21 -0
- cachu-0.2.3.dist-info/RECORD +0 -21
- {cachu-0.2.3.dist-info → cachu-0.2.4.dist-info}/WHEEL +0 -0
- {cachu-0.2.3.dist-info → cachu-0.2.4.dist-info}/top_level.txt +0 -0
cachu/backends/sqlite.py
CHANGED
|
@@ -1,14 +1,30 @@
|
|
|
1
1
|
"""SQLite-based cache backend.
|
|
2
2
|
"""
|
|
3
|
-
import
|
|
3
|
+
import asyncio
|
|
4
4
|
import pickle
|
|
5
5
|
import sqlite3
|
|
6
6
|
import threading
|
|
7
7
|
import time
|
|
8
|
-
from collections.abc import Iterator
|
|
9
|
-
from typing import Any
|
|
8
|
+
from collections.abc import AsyncIterator, Iterator
|
|
9
|
+
from typing import TYPE_CHECKING, Any
|
|
10
10
|
|
|
11
|
-
from . import NO_VALUE, Backend
|
|
11
|
+
from . import NO_VALUE, AsyncBackend, Backend
|
|
12
|
+
|
|
13
|
+
if TYPE_CHECKING:
|
|
14
|
+
import aiosqlite
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _get_aiosqlite_module() -> Any:
|
|
18
|
+
"""Import aiosqlite module, raising helpful error if not installed.
|
|
19
|
+
"""
|
|
20
|
+
try:
|
|
21
|
+
import aiosqlite
|
|
22
|
+
return aiosqlite
|
|
23
|
+
except ImportError as e:
|
|
24
|
+
raise RuntimeError(
|
|
25
|
+
"Async SQLite support requires the 'aiosqlite' package. "
|
|
26
|
+
"Install with: pip install cachu[async]"
|
|
27
|
+
) from e
|
|
12
28
|
|
|
13
29
|
|
|
14
30
|
class SqliteBackend(Backend):
|
|
@@ -26,18 +42,18 @@ class SqliteBackend(Backend):
|
|
|
26
42
|
with self._lock:
|
|
27
43
|
conn = sqlite3.connect(self._filepath)
|
|
28
44
|
try:
|
|
29
|
-
conn.execute(
|
|
45
|
+
conn.execute("""
|
|
30
46
|
CREATE TABLE IF NOT EXISTS cache (
|
|
31
47
|
key TEXT PRIMARY KEY,
|
|
32
48
|
value BLOB NOT NULL,
|
|
33
49
|
created_at REAL NOT NULL,
|
|
34
50
|
expires_at REAL NOT NULL
|
|
35
51
|
)
|
|
36
|
-
|
|
37
|
-
conn.execute(
|
|
52
|
+
""")
|
|
53
|
+
conn.execute("""
|
|
38
54
|
CREATE INDEX IF NOT EXISTS idx_cache_expires
|
|
39
55
|
ON cache(expires_at)
|
|
40
|
-
|
|
56
|
+
""")
|
|
41
57
|
conn.commit()
|
|
42
58
|
finally:
|
|
43
59
|
conn.close()
|
|
@@ -111,8 +127,8 @@ class SqliteBackend(Backend):
|
|
|
111
127
|
conn = self._get_connection()
|
|
112
128
|
try:
|
|
113
129
|
conn.execute(
|
|
114
|
-
|
|
115
|
-
VALUES (?, ?, ?, ?)
|
|
130
|
+
"""INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)
|
|
131
|
+
VALUES (?, ?, ?, ?)""",
|
|
116
132
|
(key, value_blob, now, now + ttl),
|
|
117
133
|
)
|
|
118
134
|
conn.commit()
|
|
@@ -183,8 +199,7 @@ class SqliteBackend(Backend):
|
|
|
183
199
|
finally:
|
|
184
200
|
conn.close()
|
|
185
201
|
|
|
186
|
-
|
|
187
|
-
yield key
|
|
202
|
+
yield from all_keys
|
|
188
203
|
|
|
189
204
|
def count(self, pattern: str | None = None) -> int:
|
|
190
205
|
"""Count keys matching pattern.
|
|
@@ -238,3 +253,233 @@ class SqliteBackend(Backend):
|
|
|
238
253
|
return count
|
|
239
254
|
finally:
|
|
240
255
|
conn.close()
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
class AsyncSqliteBackend(AsyncBackend):
    """Async SQLite file-based cache backend using aiosqlite.

    Values are pickled into a single ``cache`` table; expiry is enforced
    lazily on read and eagerly via :meth:`cleanup_expired`. A single shared
    connection is created on first use (WAL mode, 5s busy timeout). Writes
    are serialized through an asyncio lock; reads go straight to the
    connection.
    """

    def __init__(self, filepath: str) -> None:
        self._filepath = filepath
        self._connection: aiosqlite.Connection | None = None
        self._init_lock = asyncio.Lock()
        self._write_lock = asyncio.Lock()
        self._initialized = False
        # Strong references to fire-and-forget background tasks. The event
        # loop only keeps a weak reference to tasks, so without this set a
        # scheduled deletion could be garbage-collected before it runs
        # (see asyncio.create_task documentation).
        self._background_tasks: set[asyncio.Task[None]] = set()

    async def _ensure_initialized(self) -> 'aiosqlite.Connection':
        """Ensure database is initialized and return the shared connection.

        Connection creation and schema setup are guarded by ``_init_lock``
        so concurrent first calls do not race.
        """
        async with self._init_lock:
            if self._connection is None:
                aiosqlite = _get_aiosqlite_module()
                self._connection = await aiosqlite.connect(self._filepath)
                await self._connection.execute('PRAGMA journal_mode=WAL')
                await self._connection.execute('PRAGMA busy_timeout=5000')

            if not self._initialized:
                await self._connection.execute("""
                    CREATE TABLE IF NOT EXISTS cache (
                        key TEXT PRIMARY KEY,
                        value BLOB NOT NULL,
                        created_at REAL NOT NULL,
                        expires_at REAL NOT NULL
                    )
                """)
                await self._connection.execute("""
                    CREATE INDEX IF NOT EXISTS idx_cache_expires
                    ON cache(expires_at)
                """)
                await self._connection.commit()
                self._initialized = True

        return self._connection

    def _schedule_delete(self, key: str) -> None:
        """Schedule a background deletion task (fire-and-forget).

        The task is kept in ``_background_tasks`` until it finishes so it
        cannot be garbage-collected mid-flight. Deletion errors are
        deliberately swallowed: this is a best-effort lazy expiry.
        """
        async def _delete() -> None:
            try:
                async with self._write_lock:
                    conn = await self._ensure_initialized()
                    await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
                    await conn.commit()
            except Exception:
                pass

        task = asyncio.create_task(_delete())
        self._background_tasks.add(task)
        task.add_done_callback(self._background_tasks.discard)

    async def get(self, key: str) -> Any:
        """Get value by key. Returns NO_VALUE if not found or expired.

        Expired rows are removed lazily via a background deletion task.
        Any backend error is treated as a miss.
        """
        try:
            conn = await self._ensure_initialized()
            cursor = await conn.execute(
                'SELECT value, expires_at FROM cache WHERE key = ?',
                (key,),
            )
            row = await cursor.fetchone()

            if row is None:
                return NO_VALUE

            value_blob, expires_at = row
            if time.time() > expires_at:
                self._schedule_delete(key)
                return NO_VALUE

            return pickle.loads(value_blob)
        except Exception:
            return NO_VALUE

    async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.

        Same expiry and error semantics as :meth:`get`.
        """
        try:
            conn = await self._ensure_initialized()
            cursor = await conn.execute(
                'SELECT value, created_at, expires_at FROM cache WHERE key = ?',
                (key,),
            )
            row = await cursor.fetchone()

            if row is None:
                return NO_VALUE, None

            value_blob, created_at, expires_at = row
            if time.time() > expires_at:
                self._schedule_delete(key)
                return NO_VALUE, None

            return pickle.loads(value_blob), created_at
        except Exception:
            return NO_VALUE, None

    async def set(self, key: str, value: Any, ttl: int) -> None:
        """Set value with TTL in seconds.

        Serializes the value with pickle and upserts the row under the
        write lock.
        """
        now = time.time()
        value_blob = pickle.dumps(value)

        async with self._write_lock:
            conn = await self._ensure_initialized()
            await conn.execute(
                """INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)
                VALUES (?, ?, ?, ?)""",
                (key, value_blob, now, now + ttl),
            )
            await conn.commit()

    async def delete(self, key: str) -> None:
        """Delete value by key. Errors are swallowed (best-effort)."""
        async with self._write_lock:
            try:
                conn = await self._ensure_initialized()
                await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
                await conn.commit()
            except Exception:
                pass

    async def clear(self, pattern: str | None = None) -> int:
        """Clear entries matching pattern. Returns count of cleared entries.

        With no pattern the whole table is emptied; otherwise rows are
        matched with SQLite GLOB. Returns 0 on any backend error.
        """
        async with self._write_lock:
            try:
                conn = await self._ensure_initialized()
                if pattern is None:
                    cursor = await conn.execute('SELECT COUNT(*) FROM cache')
                    row = await cursor.fetchone()
                    count = row[0]
                    await conn.execute('DELETE FROM cache')
                    await conn.commit()
                    return count

                glob_pattern = self._fnmatch_to_glob(pattern)
                cursor = await conn.execute(
                    'SELECT COUNT(*) FROM cache WHERE key GLOB ?',
                    (glob_pattern,),
                )
                row = await cursor.fetchone()
                count = row[0]
                await conn.execute('DELETE FROM cache WHERE key GLOB ?', (glob_pattern,))
                await conn.commit()
                return count
            except Exception:
                return 0

    async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
        """Iterate over non-expired keys matching pattern.

        All matching keys are fetched eagerly before yielding so the
        cursor is not held open across consumer suspension points.
        """
        now = time.time()
        conn = await self._ensure_initialized()

        if pattern is None:
            cursor = await conn.execute(
                'SELECT key FROM cache WHERE expires_at > ?',
                (now,),
            )
        else:
            glob_pattern = self._fnmatch_to_glob(pattern)
            cursor = await conn.execute(
                'SELECT key FROM cache WHERE key GLOB ? AND expires_at > ?',
                (glob_pattern, now),
            )

        all_keys = [row[0] for row in await cursor.fetchall()]

        for key in all_keys:
            yield key

    async def count(self, pattern: str | None = None) -> int:
        """Count non-expired keys matching pattern. Returns 0 on error."""
        now = time.time()

        try:
            conn = await self._ensure_initialized()
            if pattern is None:
                cursor = await conn.execute(
                    'SELECT COUNT(*) FROM cache WHERE expires_at > ?',
                    (now,),
                )
            else:
                glob_pattern = self._fnmatch_to_glob(pattern)
                cursor = await conn.execute(
                    'SELECT COUNT(*) FROM cache WHERE key GLOB ? AND expires_at > ?',
                    (glob_pattern, now),
                )

            row = await cursor.fetchone()
            return row[0]
        except Exception:
            return 0

    def _fnmatch_to_glob(self, pattern: str) -> str:
        """Convert fnmatch pattern to SQLite GLOB pattern.

        fnmatch's ``*``/``?``/``[...]`` wildcards coincide with SQLite
        GLOB syntax, so the pattern passes through unchanged.
        """
        return pattern

    async def cleanup_expired(self) -> int:
        """Remove expired entries. Returns count of removed entries."""
        now = time.time()

        async with self._write_lock:
            conn = await self._ensure_initialized()
            cursor = await conn.execute(
                'SELECT COUNT(*) FROM cache WHERE expires_at <= ?',
                (now,),
            )
            row = await cursor.fetchone()
            count = row[0]
            await conn.execute('DELETE FROM cache WHERE expires_at <= ?', (now,))
            await conn.commit()
            return count

    async def close(self) -> None:
        """Close the database connection and reset initialization state."""
        if self._connection is not None:
            await self._connection.close()
            self._connection = None
            self._initialized = False
|
cachu/config.py
CHANGED
|
@@ -58,7 +58,6 @@ class CacheConfig:
|
|
|
58
58
|
key_prefix: str = ''
|
|
59
59
|
file_dir: str = '/tmp'
|
|
60
60
|
redis_url: str = 'redis://localhost:6379/0'
|
|
61
|
-
redis_distributed: bool = False
|
|
62
61
|
|
|
63
62
|
|
|
64
63
|
class ConfigRegistry:
|
|
@@ -80,7 +79,6 @@ class ConfigRegistry:
|
|
|
80
79
|
key_prefix: str | None = None,
|
|
81
80
|
file_dir: str | None = None,
|
|
82
81
|
redis_url: str | None = None,
|
|
83
|
-
redis_distributed: bool | None = None,
|
|
84
82
|
) -> CacheConfig:
|
|
85
83
|
"""Configure cache for a specific package.
|
|
86
84
|
"""
|
|
@@ -92,7 +90,6 @@ class ConfigRegistry:
|
|
|
92
90
|
'key_prefix': key_prefix,
|
|
93
91
|
'file_dir': str(file_dir) if file_dir else None,
|
|
94
92
|
'redis_url': redis_url,
|
|
95
|
-
'redis_distributed': redis_distributed,
|
|
96
93
|
}
|
|
97
94
|
updates = {k: v for k, v in updates.items() if v is not None}
|
|
98
95
|
|
|
@@ -155,7 +152,6 @@ def configure(
|
|
|
155
152
|
key_prefix: str | None = None,
|
|
156
153
|
file_dir: str | None = None,
|
|
157
154
|
redis_url: str | None = None,
|
|
158
|
-
redis_distributed: bool | None = None,
|
|
159
155
|
) -> CacheConfig:
|
|
160
156
|
"""Configure cache settings for the caller's package.
|
|
161
157
|
|
|
@@ -167,14 +163,12 @@ def configure(
|
|
|
167
163
|
key_prefix: Prefix for all cache keys (for versioning/debugging)
|
|
168
164
|
file_dir: Directory for file-based caches
|
|
169
165
|
redis_url: Redis connection URL (e.g., 'redis://localhost:6379/0')
|
|
170
|
-
redis_distributed: Use distributed locks for Redis
|
|
171
166
|
"""
|
|
172
167
|
return _registry.configure(
|
|
173
168
|
backend=backend,
|
|
174
169
|
key_prefix=key_prefix,
|
|
175
170
|
file_dir=str(file_dir) if file_dir else None,
|
|
176
171
|
redis_url=redis_url,
|
|
177
|
-
redis_distributed=redis_distributed,
|
|
178
172
|
)
|
|
179
173
|
|
|
180
174
|
|