cachu 0.1.3__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
@@ -0,0 +1,141 @@
+"""Async Redis cache backend implementation using redis.asyncio.
+"""
+import pickle
+import struct
+import time
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
+
+from . import NO_VALUE
+from .async_base import AsyncBackend
+
+if TYPE_CHECKING:
+    import redis.asyncio as aioredis
+
+
+_METADATA_FORMAT = 'd'
+_METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)
+
+
+def _get_async_redis_module() -> Any:
+    """Import redis.asyncio module, raising helpful error if not installed.
+    """
+    try:
+        import redis.asyncio as aioredis
+        return aioredis
+    except ImportError as e:
+        raise RuntimeError(
+            "Async Redis support requires the 'redis' package (>=4.2.0). "
+            "Install with: pip install cachu[redis]"
+        ) from e
+
+
+async def get_async_redis_client(url: str) -> 'aioredis.Redis':
+    """Create an async Redis client from URL.
+
+    Args:
+        url: Redis URL (e.g., 'redis://localhost:6379/0')
+    """
+    aioredis = _get_async_redis_module()
+    return aioredis.from_url(url)
+
+
+class AsyncRedisBackend(AsyncBackend):
+    """Async Redis cache backend using redis.asyncio.
+    """
+
+    def __init__(self, url: str, distributed_lock: bool = False) -> None:
+        self._url = url
+        self._distributed_lock = distributed_lock
+        self._client: aioredis.Redis | None = None
+
+    async def _get_client(self) -> 'aioredis.Redis':
+        """Lazy-load async Redis client.
+        """
+        if self._client is None:
+            self._client = await get_async_redis_client(self._url)
+        return self._client
+
+    def _pack_value(self, value: Any, created_at: float) -> bytes:
+        """Pack value with creation timestamp.
+        """
+        metadata = struct.pack(_METADATA_FORMAT, created_at)
+        pickled = pickle.dumps(value)
+        return metadata + pickled
+
+    def _unpack_value(self, data: bytes) -> tuple[Any, float]:
+        """Unpack value and creation timestamp.
+        """
+        created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
+        value = pickle.loads(data[_METADATA_SIZE:])
+        return value, created_at
+
+    async def get(self, key: str) -> Any:
+        """Get value by key. Returns NO_VALUE if not found.
+        """
+        client = await self._get_client()
+        data = await client.get(key)
+        if data is None:
+            return NO_VALUE
+        value, _ = self._unpack_value(data)
+        return value
+
+    async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        client = await self._get_client()
+        data = await client.get(key)
+        if data is None:
+            return NO_VALUE, None
+        value, created_at = self._unpack_value(data)
+        return value, created_at
+
+    async def set(self, key: str, value: Any, ttl: int) -> None:
+        """Set value with TTL in seconds.
+        """
+        client = await self._get_client()
+        now = time.time()
+        packed = self._pack_value(value, now)
+        await client.setex(key, ttl, packed)
+
+    async def delete(self, key: str) -> None:
+        """Delete value by key.
+        """
+        client = await self._get_client()
+        await client.delete(key)
+
+    async def clear(self, pattern: str | None = None) -> int:
+        """Clear entries matching pattern. Returns count of cleared entries.
+        """
+        client = await self._get_client()
+        if pattern is None:
+            pattern = '*'
+
+        count = 0
+        async for key in client.scan_iter(match=pattern):
+            await client.delete(key)
+            count += 1
+        return count
+
+    async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Iterate over keys matching pattern.
+        """
+        client = await self._get_client()
+        redis_pattern = pattern or '*'
+        async for key in client.scan_iter(match=redis_pattern):
+            yield key.decode() if isinstance(key, bytes) else key
+
+    async def count(self, pattern: str | None = None) -> int:
+        """Count keys matching pattern.
+        """
+        count = 0
+        async for _ in self.keys(pattern):
+            count += 1
+        return count
+
+    async def close(self) -> None:
+        """Close the Redis connection.
+        """
+        if self._client is not None:
+            await self._client.close()
+            self._client = None
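
For context, a minimal usage sketch of the new async Redis backend follows. The import path is an assumption (the hunk above does not name the new file), as are the Redis URL and the cache key; only methods shown in the diff are used.

import asyncio

from cachu.backends.async_redis import AsyncRedisBackend  # hypothetical module path


async def main() -> None:
    backend = AsyncRedisBackend('redis://localhost:6379/0')
    await backend.set('greeting', {'msg': 'hello'}, ttl=60)  # stored via SETEX, expires after 60s
    print(await backend.get('greeting'))                     # {'msg': 'hello'}
    # The creation timestamp travels with the value (struct-packed 'd' prefix).
    value, created_at = await backend.get_with_metadata('greeting')
    await backend.close()


asyncio.run(main())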
@@ -0,0 +1,244 @@
+"""Async SQLite-based cache backend using aiosqlite.
+"""
+import asyncio
+import pickle
+import time
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
+
+from . import NO_VALUE
+from .async_base import AsyncBackend
+
+if TYPE_CHECKING:
+    import aiosqlite
+
+
+def _get_aiosqlite_module() -> Any:
+    """Import aiosqlite module, raising helpful error if not installed.
+    """
+    try:
+        import aiosqlite
+        return aiosqlite
+    except ImportError as e:
+        raise RuntimeError(
+            "Async SQLite support requires the 'aiosqlite' package. "
+            "Install with: pip install cachu[async]"
+        ) from e
+
+
+class AsyncSqliteBackend(AsyncBackend):
+    """Async SQLite file-based cache backend using aiosqlite.
+    """
+
+    def __init__(self, filepath: str) -> None:
+        self._filepath = filepath
+        self._connection: aiosqlite.Connection | None = None
+        self._lock = asyncio.Lock()
+        self._initialized = False
+
+    async def _ensure_initialized(self) -> 'aiosqlite.Connection':
+        """Ensure database is initialized and return connection.
+        """
+        if self._connection is None:
+            aiosqlite = _get_aiosqlite_module()
+            self._connection = await aiosqlite.connect(self._filepath)
+
+        if not self._initialized:
+            await self._connection.execute('''
+                CREATE TABLE IF NOT EXISTS cache (
+                    key TEXT PRIMARY KEY,
+                    value BLOB NOT NULL,
+                    created_at REAL NOT NULL,
+                    expires_at REAL NOT NULL
+                )
+            ''')
+            await self._connection.execute('''
+                CREATE INDEX IF NOT EXISTS idx_cache_expires
+                ON cache(expires_at)
+            ''')
+            await self._connection.commit()
+            self._initialized = True
+
+        return self._connection
+
+    async def get(self, key: str) -> Any:
+        """Get value by key. Returns NO_VALUE if not found or expired.
+        """
+        async with self._lock:
+            try:
+                conn = await self._ensure_initialized()
+                cursor = await conn.execute(
+                    'SELECT value, expires_at FROM cache WHERE key = ?',
+                    (key,),
+                )
+                row = await cursor.fetchone()
+
+                if row is None:
+                    return NO_VALUE
+
+                value_blob, expires_at = row
+                if time.time() > expires_at:
+                    await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
+                    await conn.commit()
+                    return NO_VALUE
+
+                return pickle.loads(value_blob)
+            except Exception:
+                return NO_VALUE
+
+    async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+        """
+        async with self._lock:
+            try:
+                conn = await self._ensure_initialized()
+                cursor = await conn.execute(
+                    'SELECT value, created_at, expires_at FROM cache WHERE key = ?',
+                    (key,),
+                )
+                row = await cursor.fetchone()
+
+                if row is None:
+                    return NO_VALUE, None
+
+                value_blob, created_at, expires_at = row
+                if time.time() > expires_at:
+                    await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
+                    await conn.commit()
+                    return NO_VALUE, None
+
+                return pickle.loads(value_blob), created_at
+            except Exception:
+                return NO_VALUE, None
+
+    async def set(self, key: str, value: Any, ttl: int) -> None:
+        """Set value with TTL in seconds.
+        """
+        now = time.time()
+        value_blob = pickle.dumps(value)
+
+        async with self._lock:
+            conn = await self._ensure_initialized()
+            await conn.execute(
+                '''INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)
+                   VALUES (?, ?, ?, ?)''',
+                (key, value_blob, now, now + ttl),
+            )
+            await conn.commit()
+
+    async def delete(self, key: str) -> None:
+        """Delete value by key.
+        """
+        async with self._lock:
+            try:
+                conn = await self._ensure_initialized()
+                await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
+                await conn.commit()
+            except Exception:
+                pass
+
+    async def clear(self, pattern: str | None = None) -> int:
+        """Clear entries matching pattern. Returns count of cleared entries.
+        """
+        async with self._lock:
+            try:
+                conn = await self._ensure_initialized()
+                if pattern is None:
+                    cursor = await conn.execute('SELECT COUNT(*) FROM cache')
+                    row = await cursor.fetchone()
+                    count = row[0]
+                    await conn.execute('DELETE FROM cache')
+                    await conn.commit()
+                    return count
+
+                glob_pattern = self._fnmatch_to_glob(pattern)
+                cursor = await conn.execute(
+                    'SELECT COUNT(*) FROM cache WHERE key GLOB ?',
+                    (glob_pattern,),
+                )
+                row = await cursor.fetchone()
+                count = row[0]
+                await conn.execute('DELETE FROM cache WHERE key GLOB ?', (glob_pattern,))
+                await conn.commit()
+                return count
+            except Exception:
+                return 0
+
+    async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+        """Iterate over keys matching pattern.
+        """
+        now = time.time()
+
+        async with self._lock:
+            conn = await self._ensure_initialized()
+            if pattern is None:
+                cursor = await conn.execute(
+                    'SELECT key FROM cache WHERE expires_at > ?',
+                    (now,),
+                )
+            else:
+                glob_pattern = self._fnmatch_to_glob(pattern)
+                cursor = await conn.execute(
+                    'SELECT key FROM cache WHERE key GLOB ? AND expires_at > ?',
+                    (glob_pattern, now),
+                )
+
+            all_keys = [row[0] for row in await cursor.fetchall()]
+
+        for key in all_keys:
+            yield key
+
+    async def count(self, pattern: str | None = None) -> int:
+        """Count keys matching pattern.
+        """
+        now = time.time()
+
+        async with self._lock:
+            try:
+                conn = await self._ensure_initialized()
+                if pattern is None:
+                    cursor = await conn.execute(
+                        'SELECT COUNT(*) FROM cache WHERE expires_at > ?',
+                        (now,),
+                    )
+                else:
+                    glob_pattern = self._fnmatch_to_glob(pattern)
+                    cursor = await conn.execute(
+                        'SELECT COUNT(*) FROM cache WHERE key GLOB ? AND expires_at > ?',
+                        (glob_pattern, now),
+                    )
+
+                row = await cursor.fetchone()
+                return row[0]
+            except Exception:
+                return 0
+
+    def _fnmatch_to_glob(self, pattern: str) -> str:
+        """Convert fnmatch pattern to SQLite GLOB pattern.
+        """
+        return pattern
+
+    async def cleanup_expired(self) -> int:
+        """Remove expired entries. Returns count of removed entries.
+        """
+        now = time.time()
+
+        async with self._lock:
+            conn = await self._ensure_initialized()
+            cursor = await conn.execute(
+                'SELECT COUNT(*) FROM cache WHERE expires_at <= ?',
+                (now,),
+            )
+            row = await cursor.fetchone()
+            count = row[0]
+            await conn.execute('DELETE FROM cache WHERE expires_at <= ?', (now,))
+            await conn.commit()
+            return count
+
+    async def close(self) -> None:
+        """Close the database connection.
+        """
+        if self._connection is not None:
+            await self._connection.close()
+            self._connection = None
+            self._initialized = False
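
Likewise, a short sketch of the new async SQLite backend (module path and file path again assumed, not taken from the diff). Note that _fnmatch_to_glob passes patterns through unchanged, so patterns use SQLite GLOB syntax ('*' and '?'), which coincides with fnmatch for simple patterns.

import asyncio

from cachu.backends.async_sqlite import AsyncSqliteBackend  # hypothetical module path


async def main() -> None:
    backend = AsyncSqliteBackend('/tmp/cachu.db')
    await backend.set('user:1', {'name': 'Ada'}, ttl=30)
    print(await backend.get('user:1'))     # {'name': 'Ada'}
    print(await backend.count('user:*'))   # 1 -- GLOB-style pattern, passed through as-is
    removed = await backend.cleanup_expired()  # delete rows whose expires_at has passed
    print(removed)
    await backend.close()


asyncio.run(main())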
cachu/backends/file.py CHANGED
@@ -1,158 +1,10 @@
-"""File-based cache backend using DBM.
-"""
-import dbm
-import fnmatch
-import pathlib
-import pickle
-import struct
-import threading
-import time
-from collections.abc import Iterator
-from typing import Any
-
-from . import NO_VALUE, Backend
-
-_METADATA_FORMAT = 'dd'
-_METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)
-
-
-class FileBackend(Backend):
-    """DBM file-based cache backend.
-    """
-
-    def __init__(self, filepath: str) -> None:
-        self._filepath = filepath
-        self._lock = threading.RLock()
-        self._ensure_dir()
-
-    def _ensure_dir(self) -> None:
-        """Ensure the directory for the cache file exists.
-        """
-        directory = pathlib.Path(self._filepath).parent
-        if directory and not pathlib.Path(directory).exists():
-            pathlib.Path(directory).mkdir(exist_ok=True, parents=True)
-
-    def _pack_value(self, value: Any, created_at: float, expires_at: float) -> bytes:
-        """Pack value with metadata.
-        """
-        metadata = struct.pack(_METADATA_FORMAT, created_at, expires_at)
-        pickled = pickle.dumps(value)
-        return metadata + pickled
-
-    def _unpack_value(self, data: bytes) -> tuple[Any, float, float]:
-        """Unpack value and metadata.
-        """
-        created_at, expires_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])
-        value = pickle.loads(data[_METADATA_SIZE:])
-        return value, created_at, expires_at
-
-    def get(self, key: str) -> Any:
-        """Get value by key. Returns NO_VALUE if not found or expired.
-        """
-        with self._lock:
-            try:
-                with dbm.open(self._filepath, 'c') as db:
-                    data = db.get(key.encode())
-                    if data is None:
-                        return NO_VALUE
-
-                    value, created_at, expires_at = self._unpack_value(data)
-                    if time.time() > expires_at:
-                        del db[key.encode()]
-                        return NO_VALUE
+"""File-based cache backend using SQLite.
 
-                    return value
-            except Exception:
-                return NO_VALUE
-
-    def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
-        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
-        """
-        with self._lock:
-            try:
-                with dbm.open(self._filepath, 'c') as db:
-                    data = db.get(key.encode())
-                    if data is None:
-                        return NO_VALUE, None
-
-                    value, created_at, expires_at = self._unpack_value(data)
-                    if time.time() > expires_at:
-                        del db[key.encode()]
-                        return NO_VALUE, None
-
-                    return value, created_at
-            except Exception:
-                return NO_VALUE, None
-
-    def set(self, key: str, value: Any, ttl: int) -> None:
-        """Set value with TTL in seconds.
-        """
-        now = time.time()
-        packed = self._pack_value(value, now, now + ttl)
-        with self._lock, dbm.open(self._filepath, 'c') as db:
-            db[key.encode()] = packed
-
-    def delete(self, key: str) -> None:
-        """Delete value by key.
-        """
-        with self._lock:
-            try:
-                with dbm.open(self._filepath, 'c') as db:
-                    if key.encode() in db:
-                        del db[key.encode()]
-            except Exception:
-                pass
-
-    def clear(self, pattern: str | None = None) -> int:
-        """Clear entries matching pattern. Returns count of cleared entries.
-        """
-        with self._lock:
-            try:
-                if pattern is None:
-                    with dbm.open(self._filepath, 'n'):
-                        pass
-                    return -1
-
-                with dbm.open(self._filepath, 'c') as db:
-                    keys_to_delete = [
-                        k for k in db.keys()
-                        if fnmatch.fnmatch(k.decode(), pattern)
-                    ]
-                    for key in keys_to_delete:
-                        del db[key]
-                    return len(keys_to_delete)
-            except Exception:
-                return 0
-
-    def keys(self, pattern: str | None = None) -> Iterator[str]:
-        """Iterate over keys matching pattern.
-        """
-        now = time.time()
-        with self._lock:
-            try:
-                with dbm.open(self._filepath, 'c') as db:
-                    all_keys = [k.decode() for k in db.keys()]
-            except Exception:
-                return
-
-        for key in all_keys:
-            with self._lock:
-                try:
-                    with dbm.open(self._filepath, 'c') as db:
-                        data = db.get(key.encode())
-                        if data is None:
-                            continue
-                        _, _, expires_at = self._unpack_value(data)
-                        if now > expires_at:
-                            del db[key.encode()]
-                            continue
-                except Exception:
-                    continue
+Note: This module previously used DBM. Migration to SQLite happened in v0.2.0.
+Existing DBM cache files will be ignored - clear your cache directory on upgrade.
+"""
+from .sqlite import SqliteBackend
 
-            if pattern is None or fnmatch.fnmatch(key, pattern):
-                yield key
+FileBackend = SqliteBackend
 
-    def count(self, pattern: str | None = None) -> int:
-        """Count keys matching pattern.
-        """
-        return sum(1 for _ in self.keys(pattern))
+__all__ = ['FileBackend']
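
Since FileBackend is now a thin alias for SqliteBackend, existing imports keep working. A sketch, assuming SqliteBackend preserves the synchronous Backend interface that the removed DBM code implemented (constructor taking a filepath, set/get with TTL):

from cachu.backends.file import FileBackend  # resolves to SqliteBackend as of 0.2.x

backend = FileBackend('/tmp/cachu-cache.db')  # assumed same constructor: a single filepath
backend.set('k', 'v', ttl=10)                 # assumed same sync signature as the old backend
assert backend.get('k') == 'v'

Per the new module docstring, old DBM cache files are not migrated; clearing the cache directory on upgrade avoids stale DBM files sitting next to the new SQLite database.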