hishel 1.1.6__py3-none-any.whl → 1.1.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hishel/_async_cache.py +5 -5
- hishel/_core/_storages/_async_sqlite.py +161 -144
- hishel/_core/_storages/_sync_sqlite.py +161 -144
- hishel/_sync_cache.py +5 -5
- {hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/METADATA +40 -1
- {hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/RECORD +8 -8
- {hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/WHEEL +0 -0
- {hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/licenses/LICENSE +0 -0
hishel/_async_cache.py
CHANGED
@@ -197,12 +197,12 @@ class AsyncCacheProxy:
         return state.next(revalidation_response)
 
     async def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
-        for
+        for updating_entry in state.updating_entries:
             await self.storage.update_entry(
-
-                lambda
-
-                response=replace(
+                updating_entry.id,
+                lambda existing_entry: replace(
+                    existing_entry,
+                    response=replace(existing_entry.response, headers=updating_entry.response.headers),
                 ),
             )
         return state.next()
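The `_handle_update` change above is the fix the 1.1.7 changelog (further down) describes as "Lambda parameter name clashes the loop variable being closed over" (#427): the callback parameter used to shadow the variable it was meant to close over, so the fresh headers could never be read inside the lambda. A minimal sketch of the pitfall, with plain dicts standing in for hishel's `Entry` objects (names here are illustrative, not hishel's API):

```python
# Hypothetical stand-ins: dicts instead of hishel's Entry/Response objects.
updating = {"headers": {"etag": "new"}}  # fresh revalidation result

# Buggy shape: the parameter name shadows the closed-over variable, so
# `updating` inside the lambda is the *stored* entry, not the fresh one.
buggy = lambda updating: {**updating, "headers": updating["headers"]}

# Fixed shape, mirroring the diff's
# `lambda existing_entry: replace(existing_entry, ...,
#  headers=updating_entry.response.headers)`.
fixed = lambda existing: {**existing, "headers": updating["headers"]}

stored = {"body": b"cached", "headers": {"etag": "old"}}
print(buggy(stored)["headers"])  # {'etag': 'old'} -- the update is lost
print(fixed(stored)["headers"])  # {'etag': 'new'} -- headers refreshed
```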
hishel/_core/_storages/_async_sqlite.py
CHANGED

@@ -35,6 +35,7 @@ BATCH_CLEANUP_CHUNK_SIZE = 200
 
 try:
     import anysqlite
+    from anyio import Lock
 
     class AsyncSqliteStorage(AsyncBaseStorage):
         _COMPLETE_CHUNK_NUMBER = -1

@@ -43,27 +44,32 @@ try:
             self,
             *,
             connection: Optional[anysqlite.Connection] = None,
-            database_path: str = "hishel_cache.db",
+            database_path: Union[str, Path] = "hishel_cache.db",
             default_ttl: Optional[float] = None,
             refresh_ttl_on_access: bool = True,
         ) -> None:
-            db_path = Path(database_path)
-
             self.connection = connection
-            self.database_path = (
-                ensure_cache_dict(db_path.parent if db_path.parent != Path(".") else None) / db_path.name
-            )
+            self.database_path: Path = database_path if isinstance(database_path, Path) else Path(database_path)
             self.default_ttl = default_ttl
             self.refresh_ttl_on_access = refresh_ttl_on_access
             self.last_cleanup = time.time() - BATCH_CLEANUP_INTERVAL + BATCH_CLEANUP_START_DELAY
             # When this storage instance was created. Used to delay the first cleanup.
             self._start_time = time.time()
             self._initialized = False
+            self._lock = Lock()
 
         async def _ensure_connection(self) -> anysqlite.Connection:
-            """
+            """
+            Ensure connection is established and database is initialized.
+
+            Note: This method assumes the caller has already acquired the lock.
+            """
+
             if self.connection is None:
-
+                # Create cache directory and resolve full path on first connection
+                parent = self.database_path.parent if self.database_path.parent != Path(".") else None
+                full_path = ensure_cache_dict(parent) / self.database_path.name
+                self.connection = await anysqlite.connect(str(full_path))
             if not self._initialized:
                 await self._initialize_database()
                 self._initialized = True

@@ -107,151 +113,156 @@ try:
         ) -> Entry:
             key_bytes = key.encode("utf-8")
 
-
-
+            async with self._lock:
+                connection = await self._ensure_connection()
+                cursor = await connection.cursor()
 
-
-
-
-
-
+                # Create a new entry directly with both request and response
+                pair_id = id_ if id_ is not None else uuid.uuid4()
+                pair_meta = EntryMeta(
+                    created_at=time.time(),
+                )
 
-
-
-
-
-
+                assert isinstance(response.stream, (AsyncIterator, AsyncIterable))
+                response_with_stream = replace(
+                    response,
+                    stream=self._save_stream_unlocked(response.stream, pair_id.bytes),
+                )
 
-
-
-
-
-
-
-
+                complete_entry = Entry(
+                    id=pair_id,
+                    request=request,
+                    response=response_with_stream,
+                    meta=pair_meta,
+                    cache_key=key_bytes,
+                )
 
-
-
-
-
-
-
+                # Insert the complete entry into the database
+                await cursor.execute(
+                    "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
+                    (pair_id.bytes, key_bytes, pack(complete_entry, kind="pair"), pair_meta.created_at, None),
+                )
+                await connection.commit()
 
-
+                return complete_entry
 
         async def get_entries(self, key: str) -> List[Entry]:
             final_pairs: List[Entry] = []
 
             now = time.time()
-
-
-
-
-
-
+            async with self._lock:
+                if now - self.last_cleanup >= BATCH_CLEANUP_INTERVAL:
+                    try:
+                        await self._batch_cleanup()
+                    except Exception:
+                        # don't let cleanup prevent reads; failures are non-fatal
+                        pass
 
-
-
-
-
-
-
-
+                connection = await self._ensure_connection()
+                cursor = await connection.cursor()
+                # Query entries directly by cache_key
+                await cursor.execute(
+                    "SELECT id, data FROM entries WHERE cache_key = ?",
+                    (key.encode("utf-8"),),
+                )
 
-
-
+                for row in await cursor.fetchall():
+                    pair_data = unpack(row[1], kind="pair")
 
-
-
+                    if pair_data is None:
+                        continue
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                    # Skip entries without a response (incomplete)
+                    if not await self._is_stream_complete(pair_data.id, cursor=cursor):
+                        continue
+
+                    # Skip expired entries
+                    if await self._is_pair_expired(pair_data, cursor=cursor):
+                        continue
+
+                    # Skip soft-deleted entries
+                    if self.is_soft_deleted(pair_data):
+                        continue
+
+                    final_pairs.append(pair_data)
+
+                pairs_with_streams: List[Entry] = []
+
+                # Only restore response streams from cache
+                for pair in final_pairs:
+                    pairs_with_streams.append(
+                        replace(
+                            pair,
+                            response=replace(
+                                pair.response,
+                                stream=self._stream_data_from_cache(pair.id.bytes),
+                            ),
+                        )
                     )
-
-            return pairs_with_streams
+                return pairs_with_streams
 
         async def update_entry(
             self,
             id: uuid.UUID,
             new_pair: Union[Entry, Callable[[Entry], Entry]],
         ) -> Optional[Entry]:
-
-
-
-
-
-            if result is None:
-                return None
+            async with self._lock:
+                connection = await self._ensure_connection()
+                cursor = await connection.cursor()
+                await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+                result = await cursor.fetchone()
 
-
+                if result is None:
+                    return None
 
-
-            if not isinstance(pair, Entry) or pair.response is None:
-                return None
+                pair = unpack(result[0], kind="pair")
 
-
-
-
-            complete_pair = new_pair(pair)
+                # Skip entries without a response (incomplete)
+                if not isinstance(pair, Entry) or pair.response is None:
+                    return None
 
-
-
+                if isinstance(new_pair, Entry):
+                    complete_pair = new_pair
+                else:
+                    complete_pair = new_pair(pair)
 
-
-
-                (pack(complete_pair, kind="pair"), id.bytes),
-            )
+                if pair.id != complete_pair.id:
+                    raise ValueError("Pair ID mismatch")
 
-            if pair.cache_key != complete_pair.cache_key:
                 await cursor.execute(
-                    "UPDATE entries SET
-                    (complete_pair
+                    "UPDATE entries SET data = ? WHERE id = ?",
+                    (pack(complete_pair, kind="pair"), id.bytes),
                 )
 
-
+                if pair.cache_key != complete_pair.cache_key:
+                    await cursor.execute(
+                        "UPDATE entries SET cache_key = ? WHERE id = ?",
+                        (complete_pair.cache_key, complete_pair.id.bytes),
+                    )
 
-
+                await connection.commit()
+
+                return complete_pair
 
         async def remove_entry(self, id: uuid.UUID) -> None:
-
-
-
-
+            async with self._lock:
+                connection = await self._ensure_connection()
+                cursor = await connection.cursor()
+                await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+                result = await cursor.fetchone()
 
-
-
+                if result is None:
+                    return None
 
-
-
-
+                pair = unpack(result[0], kind="pair")
+                await self._soft_delete_pair(pair, cursor)
+                await connection.commit()
 
         async def close(self) -> None:
-
-
-
+            async with self._lock:
+                if self.connection is not None:
+                    await self.connection.close()
+                    self.connection = None
 
         async def _is_stream_complete(self, pair_id: uuid.UUID, cursor: anysqlite.Cursor) -> bool:
             # Check if there's a completion marker (chunk_number = -1) for response stream

@@ -364,36 +375,40 @@ try:
             """
             await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
 
-        async def
+        async def _save_stream_unlocked(
             self,
             stream: AsyncIterator[bytes],
             entry_id: bytes,
         ) -> AsyncIterator[bytes]:
             """
             Wrapper around an async iterator that also saves the response data to the cache in chunks.
+
+            Note: This method assumes the caller has already acquired the lock.
             """
             chunk_number = 0
             content_length = 0
             async for chunk in stream:
                 content_length += len(chunk)
+                async with self._lock:
+                    connection = await self._ensure_connection()
+                    cursor = await connection.cursor()
+                    await cursor.execute(
+                        "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                        (entry_id, chunk_number, chunk),
+                    )
+                    await connection.commit()
+                chunk_number += 1
+                yield chunk
+
+            async with self._lock:
+                # Mark end of stream with chunk_number = -1
                 connection = await self._ensure_connection()
                 cursor = await connection.cursor()
                 await cursor.execute(
                     "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
-                    (entry_id,
+                    (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
                 )
                 await connection.commit()
-                chunk_number += 1
-                yield chunk
-
-            # Mark end of stream with chunk_number = -1
-            connection = await self._ensure_connection()
-            cursor = await connection.cursor()
-            await cursor.execute(
-                "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
-                (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
-            )
-            await connection.commit()
 
         async def _stream_data_from_cache(
             self,

@@ -404,23 +419,25 @@ try:
             """
             chunk_number = 0
 
-            connection = await self._ensure_connection()
             while True:
-
-
-
-                (
-
-
+                async with self._lock:
+                    connection = await self._ensure_connection()
+                    cursor = await connection.cursor()
+                    await cursor.execute(
+                        "SELECT chunk_data FROM streams WHERE entry_id = ? AND chunk_number = ?",
+                        (entry_id, chunk_number),
+                    )
+                    result = await cursor.fetchone()
+
+                if result is None:
+                    break
+                chunk = result[0]
+                # chunk_number = -1 is the completion marker with empty data
+                if chunk == b"":
+                    break
+                yield chunk
+                chunk_number += 1
 
-                if result is None:
-                    break
-                chunk = result[0]
-                # chunk_number = -1 is the completion marker with empty data
-                if chunk == b"":
-                    break
-                yield chunk
-                chunk_number += 1
 
 except ImportError:
 
     class AsyncSqliteStorage: # type: ignore[no-redef]
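The storage hunks above serialize every database touch behind a single lock (`anyio.Lock` here, `threading.RLock` in the sync twin below), taking the lock before `_ensure_connection` and holding it across each cursor's unit of work; this is the "prevent race conditions" fix (#436). A minimal sketch of that guard pattern, assuming a toy in-memory store in place of the real SQLite connection:

```python
from typing import Dict, Optional

import anyio


class LockedStore:
    """Toy stand-in for AsyncSqliteStorage's lock-guarded connection."""

    def __init__(self) -> None:
        self._data: Dict[str, bytes] = {}
        self._lock = anyio.Lock()  # one lock serializes all access

    async def put(self, key: str, value: bytes) -> None:
        async with self._lock:  # concurrent tasks queue here instead of interleaving
            self._data[key] = value

    async def get(self, key: str) -> Optional[bytes]:
        async with self._lock:
            return self._data.get(key)


async def main() -> None:
    store = LockedStore()
    async with anyio.create_task_group() as tg:
        for i in range(5):
            tg.start_soon(store.put, f"k{i}", str(i).encode())
    print(await store.get("k3"))  # b'3'


anyio.run(main)
```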
hishel/_core/_storages/_sync_sqlite.py
CHANGED

@@ -35,6 +35,7 @@ BATCH_CLEANUP_CHUNK_SIZE = 200
 
 try:
     import sqlite3
+    from threading import RLock
 
    class SyncSqliteStorage(SyncBaseStorage):
         _COMPLETE_CHUNK_NUMBER = -1

@@ -43,27 +44,32 @@ try:
             self,
             *,
             connection: Optional[sqlite3.Connection] = None,
-            database_path: str = "hishel_cache.db",
+            database_path: Union[str, Path] = "hishel_cache.db",
             default_ttl: Optional[float] = None,
             refresh_ttl_on_access: bool = True,
         ) -> None:
-            db_path = Path(database_path)
-
             self.connection = connection
-            self.database_path = (
-                ensure_cache_dict(db_path.parent if db_path.parent != Path(".") else None) / db_path.name
-            )
+            self.database_path: Path = database_path if isinstance(database_path, Path) else Path(database_path)
             self.default_ttl = default_ttl
             self.refresh_ttl_on_access = refresh_ttl_on_access
             self.last_cleanup = time.time() - BATCH_CLEANUP_INTERVAL + BATCH_CLEANUP_START_DELAY
             # When this storage instance was created. Used to delay the first cleanup.
             self._start_time = time.time()
             self._initialized = False
+            self._lock = RLock()
 
         def _ensure_connection(self) -> sqlite3.Connection:
-            """
+            """
+            Ensure connection is established and database is initialized.
+
+            Note: This method assumes the caller has already acquired the lock.
+            """
+
             if self.connection is None:
-
+                # Create cache directory and resolve full path on first connection
+                parent = self.database_path.parent if self.database_path.parent != Path(".") else None
+                full_path = ensure_cache_dict(parent) / self.database_path.name
+                self.connection = sqlite3.connect(str(full_path))
             if not self._initialized:
                 self._initialize_database()
                 self._initialized = True

@@ -107,151 +113,156 @@ try:
         ) -> Entry:
             key_bytes = key.encode("utf-8")
 
-
-
+            with self._lock:
+                connection = self._ensure_connection()
+                cursor = connection.cursor()
 
-
-
-
-
-
+                # Create a new entry directly with both request and response
+                pair_id = id_ if id_ is not None else uuid.uuid4()
+                pair_meta = EntryMeta(
+                    created_at=time.time(),
+                )
 
-
-
-
-
-
+                assert isinstance(response.stream, (Iterator, Iterable))
+                response_with_stream = replace(
+                    response,
+                    stream=self._save_stream_unlocked(response.stream, pair_id.bytes),
+                )
 
-
-
-
-
-
-
-
+                complete_entry = Entry(
+                    id=pair_id,
+                    request=request,
+                    response=response_with_stream,
+                    meta=pair_meta,
+                    cache_key=key_bytes,
+                )
 
-
-
-
-
-
-
+                # Insert the complete entry into the database
+                cursor.execute(
+                    "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
+                    (pair_id.bytes, key_bytes, pack(complete_entry, kind="pair"), pair_meta.created_at, None),
+                )
+                connection.commit()
 
-
+                return complete_entry
 
         def get_entries(self, key: str) -> List[Entry]:
             final_pairs: List[Entry] = []
 
             now = time.time()
-
-
-
-
-
-
+            with self._lock:
+                if now - self.last_cleanup >= BATCH_CLEANUP_INTERVAL:
+                    try:
+                        self._batch_cleanup()
+                    except Exception:
+                        # don't let cleanup prevent reads; failures are non-fatal
+                        pass
 
-
-
-
-
-
-
-
+                connection = self._ensure_connection()
+                cursor = connection.cursor()
+                # Query entries directly by cache_key
+                cursor.execute(
+                    "SELECT id, data FROM entries WHERE cache_key = ?",
+                    (key.encode("utf-8"),),
+                )
 
-
-
+                for row in cursor.fetchall():
+                    pair_data = unpack(row[1], kind="pair")
 
-
-
+                    if pair_data is None:
+                        continue
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                    # Skip entries without a response (incomplete)
+                    if not self._is_stream_complete(pair_data.id, cursor=cursor):
+                        continue
+
+                    # Skip expired entries
+                    if self._is_pair_expired(pair_data, cursor=cursor):
+                        continue
+
+                    # Skip soft-deleted entries
+                    if self.is_soft_deleted(pair_data):
+                        continue
+
+                    final_pairs.append(pair_data)
+
+                pairs_with_streams: List[Entry] = []
+
+                # Only restore response streams from cache
+                for pair in final_pairs:
+                    pairs_with_streams.append(
+                        replace(
+                            pair,
+                            response=replace(
+                                pair.response,
+                                stream=self._stream_data_from_cache(pair.id.bytes),
+                            ),
+                        )
                    )
-
-            return pairs_with_streams
+                return pairs_with_streams
 
         def update_entry(
             self,
             id: uuid.UUID,
             new_pair: Union[Entry, Callable[[Entry], Entry]],
         ) -> Optional[Entry]:
-
-
-
-
-
-            if result is None:
-                return None
+            with self._lock:
+                connection = self._ensure_connection()
+                cursor = connection.cursor()
+                cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+                result = cursor.fetchone()
 
-
+                if result is None:
+                    return None
 
-
-            if not isinstance(pair, Entry) or pair.response is None:
-                return None
+                pair = unpack(result[0], kind="pair")
 
-
-
-
-            complete_pair = new_pair(pair)
+                # Skip entries without a response (incomplete)
+                if not isinstance(pair, Entry) or pair.response is None:
+                    return None
 
-
-
+                if isinstance(new_pair, Entry):
+                    complete_pair = new_pair
+                else:
+                    complete_pair = new_pair(pair)
 
-
-
-                (pack(complete_pair, kind="pair"), id.bytes),
-            )
+                if pair.id != complete_pair.id:
+                    raise ValueError("Pair ID mismatch")
 
-            if pair.cache_key != complete_pair.cache_key:
                 cursor.execute(
-                    "UPDATE entries SET
-                    (complete_pair
+                    "UPDATE entries SET data = ? WHERE id = ?",
+                    (pack(complete_pair, kind="pair"), id.bytes),
                 )
 
-
+                if pair.cache_key != complete_pair.cache_key:
+                    cursor.execute(
+                        "UPDATE entries SET cache_key = ? WHERE id = ?",
+                        (complete_pair.cache_key, complete_pair.id.bytes),
+                    )
 
-
+                connection.commit()
+
+                return complete_pair
 
         def remove_entry(self, id: uuid.UUID) -> None:
-
-
-
-
+            with self._lock:
+                connection = self._ensure_connection()
+                cursor = connection.cursor()
+                cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+                result = cursor.fetchone()
 
-
-
+                if result is None:
+                    return None
 
-
-
-
+                pair = unpack(result[0], kind="pair")
+                self._soft_delete_pair(pair, cursor)
+                connection.commit()
 
         def close(self) -> None:
-
-            self.connection
-
+            with self._lock:
+                if self.connection is not None:
+                    self.connection.close()
+                    self.connection = None
 
         def _is_stream_complete(self, pair_id: uuid.UUID, cursor: sqlite3.Cursor) -> bool:
             # Check if there's a completion marker (chunk_number = -1) for response stream

@@ -364,36 +375,40 @@ try:
             """
             cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
 
-        def
+        def _save_stream_unlocked(
             self,
             stream: Iterator[bytes],
             entry_id: bytes,
         ) -> Iterator[bytes]:
             """
             Wrapper around an async iterator that also saves the response data to the cache in chunks.
+
+            Note: This method assumes the caller has already acquired the lock.
             """
             chunk_number = 0
             content_length = 0
             for chunk in stream:
                 content_length += len(chunk)
+                with self._lock:
+                    connection = self._ensure_connection()
+                    cursor = connection.cursor()
+                    cursor.execute(
+                        "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                        (entry_id, chunk_number, chunk),
+                    )
+                    connection.commit()
+                chunk_number += 1
+                yield chunk
+
+            with self._lock:
+                # Mark end of stream with chunk_number = -1
                 connection = self._ensure_connection()
                 cursor = connection.cursor()
                 cursor.execute(
                     "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
-                    (entry_id,
+                    (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
                 )
                 connection.commit()
-                chunk_number += 1
-                yield chunk
-
-            # Mark end of stream with chunk_number = -1
-            connection = self._ensure_connection()
-            cursor = connection.cursor()
-            cursor.execute(
-                "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
-                (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
-            )
-            connection.commit()
 
         def _stream_data_from_cache(
             self,

@@ -404,23 +419,25 @@ try:
             """
             chunk_number = 0
 
-            connection = self._ensure_connection()
             while True:
-
-
-
-                (
-
-
+                with self._lock:
+                    connection = self._ensure_connection()
+                    cursor = connection.cursor()
+                    cursor.execute(
+                        "SELECT chunk_data FROM streams WHERE entry_id = ? AND chunk_number = ?",
+                        (entry_id, chunk_number),
+                    )
+                    result = cursor.fetchone()
+
+                if result is None:
+                    break
+                chunk = result[0]
+                # chunk_number = -1 is the completion marker with empty data
+                if chunk == b"":
+                    break
+                yield chunk
+                chunk_number += 1
 
-                if result is None:
-                    break
-                chunk = result[0]
-                # chunk_number = -1 is the completion marker with empty data
-                if chunk == b"":
-                    break
-                yield chunk
-                chunk_number += 1
 
 except ImportError:
 
     class SyncSqliteStorage: # type: ignore[no-redef]
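The sync storage keeps the same chunked-stream layout as the async one: body chunks are numbered from 0, and an empty chunk stored at `_COMPLETE_CHUNK_NUMBER = -1` marks the stream as finished, which is how `_is_stream_complete` distinguishes fully written entries from in-flight ones. A runnable sketch of that completion-marker scheme, using a simplified one-table schema rather than hishel's real one:

```python
import sqlite3

COMPLETE_CHUNK_NUMBER = -1  # sentinel, mirroring _COMPLETE_CHUNK_NUMBER above

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE streams (entry_id BLOB, chunk_number INTEGER, chunk_data BLOB)"
)


def save_stream(entry_id: bytes, chunks) -> None:
    # Store each body chunk under an increasing chunk_number ...
    for n, chunk in enumerate(chunks):
        conn.execute("INSERT INTO streams VALUES (?, ?, ?)", (entry_id, n, chunk))
    # ... then append the empty -1 marker so readers know the stream is complete.
    conn.execute(
        "INSERT INTO streams VALUES (?, ?, ?)",
        (entry_id, COMPLETE_CHUNK_NUMBER, b""),
    )
    conn.commit()


def is_complete(entry_id: bytes) -> bool:
    row = conn.execute(
        "SELECT 1 FROM streams WHERE entry_id = ? AND chunk_number = ?",
        (entry_id, COMPLETE_CHUNK_NUMBER),
    ).fetchone()
    return row is not None


save_stream(b"id-1", [b"hello ", b"world"])
print(is_complete(b"id-1"))  # True
print(is_complete(b"id-2"))  # False -- never written, so never marked complete
```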
hishel/_sync_cache.py
CHANGED
@@ -197,12 +197,12 @@ class SyncCacheProxy:
         return state.next(revalidation_response)
 
     def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
-        for
+        for updating_entry in state.updating_entries:
             self.storage.update_entry(
-
-                lambda
-
-                response=replace(
+                updating_entry.id,
+                lambda existing_entry: replace(
+                    existing_entry,
+                    response=replace(existing_entry.response, headers=updating_entry.response.headers),
                 ),
             )
         return state.next()
{hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: hishel
-Version: 1.1.6
+Version: 1.1.8
 Summary: Elegant HTTP Caching for Python
 Project-URL: Homepage, https://hishel.com
 Project-URL: Source, https://github.com/karpetrosyan/hishel

@@ -406,6 +406,45 @@ Hishel is inspired by and builds upon the excellent work in the Python HTTP ecos
 <strong>Made with ❤️ by <a href="https://github.com/karpetrosyan">Kar Petrosyan</a></strong>
 </p>
 
+## What's Changed in 1.1.8
+### ⚙️ Miscellaneous Tasks
+
+* chore(ci): remove redis action by @karpetrosyan in [#428](https://github.com/karpetrosyan/hishel/pull/428)
+### 🐛 Bug Fixes
+
+* fix: prevent race conditions by @karpetrosyan in [#436](https://github.com/karpetrosyan/hishel/pull/436)
+
+### Contributors
+* @karpetrosyan
+
+**Full Changelog**: https://github.com/karpetrosyan/hishel/compare/1.1.7...1.1.8
+
+## What's Changed in 1.1.7
+### ♻️ Refactoring
+
+* refactor(storage): create sqlite database path only when creating connections by @jeefberkey in [#426](https://github.com/karpetrosyan/hishel/pull/426)
+### ⚙️ Miscellaneous Tasks
+
+* chore(deps-dev): bump the python-packages group with 5 updates by @dependabot[bot] in [#424](https://github.com/karpetrosyan/hishel/pull/424)
+### 🐛 Bug Fixes
+
+* fix(cache): Lambda parameter name clashes the loop variable being closed over by @dump247 in [#427](https://github.com/karpetrosyan/hishel/pull/427)
+### 📚 Documentation
+
+* add release process guidelines for maintainers by @karpetrosyan
+### 🚀 Features
+
+* Feature/accept pathlib path in SqliteStorage by @daudef in [#419](https://github.com/karpetrosyan/hishel/pull/419)
+
+### Contributors
+* @karpetrosyan
+* @daudef
+* @dependabot[bot]
+* @jeefberkey
+* @dump247
+
+**Full Changelog**: https://github.com/karpetrosyan/hishel/compare/1.1.6...1.1.7
+
 ## What's Changed in 1.1.6
 ### 📚 Documentation
 
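Two of the changelog items above are visible directly in the storage diffs: #419 widens `database_path` to `Union[str, Path]`, and #426 defers cache-directory creation to `_ensure_connection`. A usage sketch under the assumption that the storage class is importable from the top-level package (the class itself lives in `hishel/_core/_storages/_sync_sqlite.py` per the diff; the public import path is not shown here):

```python
from pathlib import Path

# Assumed import location; the class comes from
# hishel/_core/_storages/_sync_sqlite.py in the diff above.
from hishel import SyncSqliteStorage

# Both spellings are accepted after #419.
storage_a = SyncSqliteStorage(database_path="hishel_cache.db")
storage_b = SyncSqliteStorage(database_path=Path("cache") / "hishel_cache.db")

# After #426 the cache directory is created lazily, on the first real
# connection, not at construction time.
```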
{hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
 hishel/__init__.py,sha256=1EdAEXWx41gmxUzG1Fchd_B4gQDtqlxlqQw0WkCBaUE,1826
-hishel/_async_cache.py,sha256=
+hishel/_async_cache.py,sha256=QprSuucR6OXYWNVS9lzM1jHjUEC33AGq8zpUT27Cngs,8839
 hishel/_async_httpx.py,sha256=89i92f2SlvgWrav_TDNU1iUzMxdR607apauxXA3pE3U,8127
 hishel/_policies.py,sha256=1ae_rmDF7oaG91-lQyOGVaTrRX8uI2GImmu5gN6WJa4,1135
-hishel/_sync_cache.py,sha256=
+hishel/_sync_cache.py,sha256=afM1MfBlT3kitBJX-YmDEJ4i6Kc1d0A4PcrvCL6LlPI,8564
 hishel/_sync_httpx.py,sha256=z1pwVUQfRf72Q48PXXZ4FKwXGevll0X5iHcVRANiP38,7952
 hishel/_utils.py,sha256=kR7RnhFqLzFRmB-YNnZteQVP0iDPUouCscA0_FHHFls,3837
 hishel/asgi.py,sha256=ocXzqrrYGazeJxlKFcz1waoKvKGOqJ7YBEAmly4Towk,14998

@@ -14,11 +14,11 @@ hishel/_core/_headers.py,sha256=hGaT6o1F-gs1pm5RpdGb0IMQL3uJYDH1xpwJLy28Cys,1751
 hishel/_core/_spec.py,sha256=26mrK0MFSN_03ZecKem0asHYCXqzJ0tmcVmJXG7VHeI,105016
 hishel/_core/models.py,sha256=EabP2qnjYVzhPWhQer3QFmdDE6TDbqEBEqPHzv25VnA,7978
 hishel/_core/_storages/_async_base.py,sha256=iZ6Mb30P0ho5h4UU5bgOrcsSMZ1427j9tht-tupZs68,2106
-hishel/_core/_storages/_async_sqlite.py,sha256=
+hishel/_core/_storages/_async_sqlite.py,sha256=IAWAqTDyIMGiMy6qAySzPU_wMcNfqV76BJQTRTX65TI,17200
 hishel/_core/_storages/_packing.py,sha256=mC8LMFQ5uPfFOgingKm2WKFO_DwcZ1OjTgI6xc0hfJI,3708
 hishel/_core/_storages/_sync_base.py,sha256=qfOvcFY5qvrzSh4ztV2Trlxft-BF7An5SFsLlEb8EeE,2075
-hishel/_core/_storages/_sync_sqlite.py,sha256=
-hishel-1.1.
-hishel-1.1.
-hishel-1.1.
-hishel-1.1.
+hishel/_core/_storages/_sync_sqlite.py,sha256=VJRlNaZjAxg1YOdLajZNG404qykyDHEp4cfsVy5HBzw,16627
+hishel-1.1.8.dist-info/METADATA,sha256=stkghsijhi2i55XUAPh9IuLeWFw2t9ptsMn1ByDXes0,22164
+hishel-1.1.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+hishel-1.1.8.dist-info/licenses/LICENSE,sha256=1qQj7pE0V2O9OIedvyOgLGLvZLaPd3nFEup3IBEOZjQ,1493
+hishel-1.1.8.dist-info/RECORD,,

{hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/WHEEL
File without changes

{hishel-1.1.6.dist-info → hishel-1.1.8.dist-info}/licenses/LICENSE
File without changes