cachu-0.2.3-py3-none-any.whl → cachu-0.2.5-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- cachu/__init__.py +8 -12
- cachu/backends/__init__.py +65 -5
- cachu/backends/memory.py +159 -57
- cachu/backends/redis.py +160 -28
- cachu/backends/sqlite.py +326 -41
- cachu/config.py +6 -6
- cachu/decorator.py +354 -87
- cachu/keys.py +8 -0
- cachu/mutex.py +247 -0
- cachu/operations.py +171 -23
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/METADATA +7 -10
- cachu-0.2.5.dist-info/RECORD +15 -0
- cachu/async_decorator.py +0 -262
- cachu/async_operations.py +0 -178
- cachu/backends/async_base.py +0 -50
- cachu/backends/async_memory.py +0 -111
- cachu/backends/async_redis.py +0 -141
- cachu/backends/async_sqlite.py +0 -256
- cachu/backends/file.py +0 -10
- cachu-0.2.3.dist-info/RECORD +0 -21
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/WHEEL +0 -0
- {cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/top_level.txt +0 -0
cachu/backends/async_redis.py
DELETED
```diff
@@ -1,141 +0,0 @@
-"""Async Redis cache backend implementation using redis.asyncio.
-"""
-import pickle
-import struct
-import time
-from collections.abc import AsyncIterator
-from typing import TYPE_CHECKING, Any
-
-from . import NO_VALUE
-from .async_base import AsyncBackend
-
-if TYPE_CHECKING:
-    import redis.asyncio as aioredis
-
-
-_METADATA_FORMAT = 'd'
-_METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)
-
-
-def _get_async_redis_module() -> Any:
-    """Import redis.asyncio module, raising helpful error if not installed.
-    """
-    try:
-        import redis.asyncio as aioredis
-        return aioredis
-    except ImportError as e:
-        raise RuntimeError(
-            "Async Redis support requires the 'redis' package (>=4.2.0). "
-            "Install with: pip install cachu[redis]"
-        ) from e
-
-
-async def get_async_redis_client(url: str) -> 'aioredis.Redis':
-    """Create an async Redis client from URL.
-
-    Args:
-        url: Redis URL (e.g., 'redis://localhost:6379/0')
-    """
-    aioredis = _get_async_redis_module()
-    return aioredis.from_url(url)
-
-
-class AsyncRedisBackend(AsyncBackend):
-    """Async Redis cache backend using redis.asyncio.
-    """
-
-    def __init__(self, url: str, distributed_lock: bool = False) -> None:
-        self._url = url
-        self._distributed_lock = distributed_lock
-        self._client: aioredis.Redis | None = None
-
-    async def _get_client(self) -> 'aioredis.Redis':
-        """Lazy-load async Redis client.
-        """
-        if self._client is None:
-            self._client = await get_async_redis_client(self._url)
-        return self._client
-
-    def _pack_value(self, value: Any, created_at: float) -> bytes:
-        """Pack value with creation timestamp.
-        """
-        metadata = struct.pack(_METADATA_FORMAT, created_at)
-        pickled = pickle.dumps(value)
-        return metadata + pickled
-
-    def _unpack_value(self, data: bytes) -> tuple[Any, float]:
-        """Unpack value and creation timestamp.
-        """
-        created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
-        value = pickle.loads(data[_METADATA_SIZE:])
-        return value, created_at
-
-    async def get(self, key: str) -> Any:
-        """Get value by key. Returns NO_VALUE if not found.
-        """
-        client = await self._get_client()
-        data = await client.get(key)
-        if data is None:
-            return NO_VALUE
-        value, _ = self._unpack_value(data)
-        return value
-
-    async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
-        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
-        """
-        client = await self._get_client()
-        data = await client.get(key)
-        if data is None:
-            return NO_VALUE, None
-        value, created_at = self._unpack_value(data)
-        return value, created_at
-
-    async def set(self, key: str, value: Any, ttl: int) -> None:
-        """Set value with TTL in seconds.
-        """
-        client = await self._get_client()
-        now = time.time()
-        packed = self._pack_value(value, now)
-        await client.setex(key, ttl, packed)
-
-    async def delete(self, key: str) -> None:
-        """Delete value by key.
-        """
-        client = await self._get_client()
-        await client.delete(key)
-
-    async def clear(self, pattern: str | None = None) -> int:
-        """Clear entries matching pattern. Returns count of cleared entries.
-        """
-        client = await self._get_client()
-        if pattern is None:
-            pattern = '*'
-
-        count = 0
-        async for key in client.scan_iter(match=pattern):
-            await client.delete(key)
-            count += 1
-        return count
-
-    async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
-        """Iterate over keys matching pattern.
-        """
-        client = await self._get_client()
-        redis_pattern = pattern or '*'
-        async for key in client.scan_iter(match=redis_pattern):
-            yield key.decode() if isinstance(key, bytes) else key
-
-    async def count(self, pattern: str | None = None) -> int:
-        """Count keys matching pattern.
-        """
-        count = 0
-        async for _ in self.keys(pattern):
-            count += 1
-        return count
-
-    async def close(self) -> None:
-        """Close the Redis connection.
-        """
-        if self._client is not None:
-            await self._client.close()
-            self._client = None
```
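For context, the deleted `AsyncRedisBackend` stored each value as an 8-byte struct-packed `created_at` timestamp followed by the pickled payload, and delegated expiry to Redis via `SETEX`. A minimal sketch of how it was driven on 0.2.3, reconstructed from the deleted code above (the Redis URL and keys are placeholders):

```python
import asyncio

from cachu.backends.async_redis import AsyncRedisBackend  # module removed in 0.2.5


async def main() -> None:
    backend = AsyncRedisBackend('redis://localhost:6379/0')  # placeholder URL
    # set() packs a created_at timestamp in front of the pickled value
    # and writes it with a 60-second server-side TTL.
    await backend.set('user:1', {'name': 'Ada'}, ttl=60)
    value = await backend.get('user:1')  # the NO_VALUE sentinel signals a miss
    print(value)
    await backend.close()


asyncio.run(main())
```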
cachu/backends/async_sqlite.py
DELETED
```diff
@@ -1,256 +0,0 @@
-"""Async SQLite-based cache backend using aiosqlite.
-"""
-import asyncio
-import pickle
-import time
-from collections.abc import AsyncIterator
-from typing import TYPE_CHECKING, Any
-
-from . import NO_VALUE
-from .async_base import AsyncBackend
-
-if TYPE_CHECKING:
-    import aiosqlite
-
-
-def _get_aiosqlite_module() -> Any:
-    """Import aiosqlite module, raising helpful error if not installed.
-    """
-    try:
-        import aiosqlite
-        return aiosqlite
-    except ImportError as e:
-        raise RuntimeError(
-            "Async SQLite support requires the 'aiosqlite' package. "
-            "Install with: pip install cachu[async]"
-        ) from e
-
-
-class AsyncSqliteBackend(AsyncBackend):
-    """Async SQLite file-based cache backend using aiosqlite.
-    """
-
-    def __init__(self, filepath: str) -> None:
-        self._filepath = filepath
-        self._connection: aiosqlite.Connection | None = None
-        self._init_lock = asyncio.Lock()
-        self._write_lock = asyncio.Lock()
-        self._initialized = False
-
-    async def _ensure_initialized(self) -> 'aiosqlite.Connection':
-        """Ensure database is initialized and return connection.
-        """
-        async with self._init_lock:
-            if self._connection is None:
-                aiosqlite = _get_aiosqlite_module()
-                self._connection = await aiosqlite.connect(self._filepath)
-                await self._connection.execute('PRAGMA journal_mode=WAL')
-                await self._connection.execute('PRAGMA busy_timeout=5000')
-
-            if not self._initialized:
-                await self._connection.execute("""
-                    CREATE TABLE IF NOT EXISTS cache (
-                        key TEXT PRIMARY KEY,
-                        value BLOB NOT NULL,
-                        created_at REAL NOT NULL,
-                        expires_at REAL NOT NULL
-                    )
-                """)
-                await self._connection.execute("""
-                    CREATE INDEX IF NOT EXISTS idx_cache_expires
-                    ON cache(expires_at)
-                """)
-                await self._connection.commit()
-                self._initialized = True
-
-        return self._connection
-
-    def _schedule_delete(self, key: str) -> None:
-        """Schedule a background deletion task (fire-and-forget).
-        """
-        async def _delete() -> None:
-            try:
-                async with self._write_lock:
-                    conn = await self._ensure_initialized()
-                    await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
-                    await conn.commit()
-            except Exception:
-                pass
-
-        asyncio.create_task(_delete())
-
-    async def get(self, key: str) -> Any:
-        """Get value by key. Returns NO_VALUE if not found or expired.
-        """
-        try:
-            conn = await self._ensure_initialized()
-            cursor = await conn.execute(
-                'SELECT value, expires_at FROM cache WHERE key = ?',
-                (key,),
-            )
-            row = await cursor.fetchone()
-
-            if row is None:
-                return NO_VALUE
-
-            value_blob, expires_at = row
-            if time.time() > expires_at:
-                self._schedule_delete(key)
-                return NO_VALUE
-
-            return pickle.loads(value_blob)
-        except Exception:
-            return NO_VALUE
-
-    async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
-        """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
-        """
-        try:
-            conn = await self._ensure_initialized()
-            cursor = await conn.execute(
-                'SELECT value, created_at, expires_at FROM cache WHERE key = ?',
-                (key,),
-            )
-            row = await cursor.fetchone()
-
-            if row is None:
-                return NO_VALUE, None
-
-            value_blob, created_at, expires_at = row
-            if time.time() > expires_at:
-                self._schedule_delete(key)
-                return NO_VALUE, None
-
-            return pickle.loads(value_blob), created_at
-        except Exception:
-            return NO_VALUE, None
-
-    async def set(self, key: str, value: Any, ttl: int) -> None:
-        """Set value with TTL in seconds.
-        """
-        now = time.time()
-        value_blob = pickle.dumps(value)
-
-        async with self._write_lock:
-            conn = await self._ensure_initialized()
-            await conn.execute(
-                """INSERT OR REPLACE INTO cache (key, value, created_at, expires_at)
-                VALUES (?, ?, ?, ?)""",
-                (key, value_blob, now, now + ttl),
-            )
-            await conn.commit()
-
-    async def delete(self, key: str) -> None:
-        """Delete value by key.
-        """
-        async with self._write_lock:
-            try:
-                conn = await self._ensure_initialized()
-                await conn.execute('DELETE FROM cache WHERE key = ?', (key,))
-                await conn.commit()
-            except Exception:
-                pass
-
-    async def clear(self, pattern: str | None = None) -> int:
-        """Clear entries matching pattern. Returns count of cleared entries.
-        """
-        async with self._write_lock:
-            try:
-                conn = await self._ensure_initialized()
-                if pattern is None:
-                    cursor = await conn.execute('SELECT COUNT(*) FROM cache')
-                    row = await cursor.fetchone()
-                    count = row[0]
-                    await conn.execute('DELETE FROM cache')
-                    await conn.commit()
-                    return count
-
-                glob_pattern = self._fnmatch_to_glob(pattern)
-                cursor = await conn.execute(
-                    'SELECT COUNT(*) FROM cache WHERE key GLOB ?',
-                    (glob_pattern,),
-                )
-                row = await cursor.fetchone()
-                count = row[0]
-                await conn.execute('DELETE FROM cache WHERE key GLOB ?', (glob_pattern,))
-                await conn.commit()
-                return count
-            except Exception:
-                return 0
-
-    async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
-        """Iterate over keys matching pattern.
-        """
-        now = time.time()
-        conn = await self._ensure_initialized()
-
-        if pattern is None:
-            cursor = await conn.execute(
-                'SELECT key FROM cache WHERE expires_at > ?',
-                (now,),
-            )
-        else:
-            glob_pattern = self._fnmatch_to_glob(pattern)
-            cursor = await conn.execute(
-                'SELECT key FROM cache WHERE key GLOB ? AND expires_at > ?',
-                (glob_pattern, now),
-            )
-
-        all_keys = [row[0] for row in await cursor.fetchall()]
-
-        for key in all_keys:
-            yield key
-
-    async def count(self, pattern: str | None = None) -> int:
-        """Count keys matching pattern.
-        """
-        now = time.time()
-
-        try:
-            conn = await self._ensure_initialized()
-            if pattern is None:
-                cursor = await conn.execute(
-                    'SELECT COUNT(*) FROM cache WHERE expires_at > ?',
-                    (now,),
-                )
-            else:
-                glob_pattern = self._fnmatch_to_glob(pattern)
-                cursor = await conn.execute(
-                    'SELECT COUNT(*) FROM cache WHERE key GLOB ? AND expires_at > ?',
-                    (glob_pattern, now),
-                )
-
-            row = await cursor.fetchone()
-            return row[0]
-        except Exception:
-            return 0
-
-    def _fnmatch_to_glob(self, pattern: str) -> str:
-        """Convert fnmatch pattern to SQLite GLOB pattern.
-        """
-        return pattern
-
-    async def cleanup_expired(self) -> int:
-        """Remove expired entries. Returns count of removed entries.
-        """
-        now = time.time()
-
-        async with self._write_lock:
-            conn = await self._ensure_initialized()
-            cursor = await conn.execute(
-                'SELECT COUNT(*) FROM cache WHERE expires_at <= ?',
-                (now,),
-            )
-            row = await cursor.fetchone()
-            count = row[0]
-            await conn.execute('DELETE FROM cache WHERE expires_at <= ?', (now,))
-            await conn.commit()
-            return count
-
-    async def close(self) -> None:
-        """Close the database connection.
-        """
-        if self._connection is not None:
-            await self._connection.close()
-            self._connection = None
-            self._initialized = False
```
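The SQLite counterpart kept a single WAL-mode connection serialized behind asyncio locks, filtered out expired rows on read, and purged them with fire-and-forget delete tasks. A minimal usage sketch based on the deleted code above (the database path is a placeholder):

```python
import asyncio

from cachu.backends.async_sqlite import AsyncSqliteBackend  # module removed in 0.2.5


async def main() -> None:
    backend = AsyncSqliteBackend('/tmp/cachu-demo.db')  # placeholder path
    await backend.set('report:42', [1, 2, 3], ttl=300)  # pickled BLOB with expires_at
    print(await backend.get('report:42'))   # [1, 2, 3] until the TTL lapses
    print(await backend.count())            # counts only rows with expires_at > now
    removed = await backend.cleanup_expired()  # bulk-deletes rows past expires_at
    print(f'{removed} expired entries removed')
    await backend.close()


asyncio.run(main())
```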
cachu/backends/file.py
DELETED
```diff
@@ -1,10 +0,0 @@
-"""File-based cache backend using SQLite.
-
-Note: This module previously used DBM. Migration to SQLite happened in v0.2.0.
-Existing DBM cache files will be ignored - clear your cache directory on upgrade.
-"""
-from .sqlite import SqliteBackend
-
-FileBackend = SqliteBackend
-
-__all__ = ['FileBackend']
```
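Since `FileBackend` was only an alias, code that imported it can switch to the SQLite backend directly; `cachu/backends/sqlite.py` remains in the wheel per the file list above. A sketch of the one-line migration, assuming that internal module path is otherwise unchanged in 0.2.5:

```python
# 0.2.3 code like this now fails, because cachu/backends/file.py was deleted:
#   from cachu.backends.file import FileBackend

# FileBackend was defined as `FileBackend = SqliteBackend`, so this is equivalent:
from cachu.backends.sqlite import SqliteBackend

FileBackend = SqliteBackend  # restores the old name locally if needed
```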
cachu-0.2.3.dist-info/RECORD
DELETED
```diff
@@ -1,21 +0,0 @@
-cachu/__init__.py,sha256=vMcuJYvSfRUsMwlFrpjS8FOuQiqPBP9e4dRlJbsvpZ0,1286
-cachu/async_decorator.py,sha256=Jx2fHESLlld7NZiD2-6kcozukJtp5efnt4cMhntDDRA,8939
-cachu/async_operations.py,sha256=eVqhZk3FVLNip_abjnCzG8AajzvJTtXbpL--dpMXBlc,5597
-cachu/config.py,sha256=KtcDGpSTJmjRrcNLz9_Om3O814oJJ3p8gntB84Pd6Dk,5922
-cachu/decorator.py,sha256=RHwDRZxZfOkBgEK1XgRyis22bxQ0ba0X4NtHBd9FTb4,8161
-cachu/keys.py,sha256=fwwNOpnDJFCIWZoQ5UGJWhJa6xu36hsBsURI-n2NJKU,3557
-cachu/operations.py,sha256=t42_Er-O59vrwFa5jdf4yq3Jr4li2l7php4yMVJnxPs,5588
-cachu/types.py,sha256=FghBN5GhxnrpuT4WUL9iNnAfdoH__cw9_Ag4kHbIXq4,723
-cachu/backends/__init__.py,sha256=Jn2yBAMmJ8d0J_NyjOtxRt7UTyMLf1rlY8QJ049hXE8,1318
-cachu/backends/async_base.py,sha256=oZ3K3PhsYkbgZxFLFk3_NbxBxtNopqS90HZBizwg_q8,1394
-cachu/backends/async_memory.py,sha256=SQvSHeWbySa52BnQLF75nhVXgsydubNu84a8hvSzQSc,3457
-cachu/backends/async_redis.py,sha256=8kefPIoIJDAZ6C6HJCvHqKFMDS10sJYh8YcJMpXpQm8,4455
-cachu/backends/async_sqlite.py,sha256=iS1YaVakzK7msKL3BVmnZc_n73-V5fUz1wCQJxEY0ak,8730
-cachu/backends/file.py,sha256=Pu01VtgHDgK6ev5hqyZXuJRCSB2VbNKHQ4w4nNKNyeI,298
-cachu/backends/memory.py,sha256=kIgrVU8k_3Aquyj2PDf8IPbTjCITM_0V5GU47m3fJmo,3138
-cachu/backends/redis.py,sha256=yE5rEBgOij9QOeC1VhWdIbGCgi442q-aWfmbbG4aNSE,3858
-cachu/backends/sqlite.py,sha256=whduN5G_bN6ZJNuCBwbraDcadv_sg0j-OEiFnP8EEsk,7803
-cachu-0.2.3.dist-info/METADATA,sha256=UXittsVjHwFjAGq6Fl8LAS_2zTKs14BYc9KvOEJHX9I,11992
-cachu-0.2.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-cachu-0.2.3.dist-info/top_level.txt,sha256=g80nNoMvLMzhSwQWV-JotCBqtsLAHeFMBo_g8hCK8hQ,6
-cachu-0.2.3.dist-info/RECORD,,
```
{cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/WHEEL
File without changes

{cachu-0.2.3.dist-info → cachu-0.2.5.dist-info}/top_level.txt
File without changes