hishel 1.0.0.dev1__py3-none-any.whl → 1.0.0.dev3__py3-none-any.whl
This diff compares two publicly released versions of the package, as they appear in the public registry they were published to. It is provided for informational purposes only.
- hishel/__init__.py +15 -14
- hishel/_async_cache.py +50 -37
- hishel/_async_httpx.py +243 -0
- hishel/_core/_headers.py +11 -1
- hishel/_core/_spec.py +184 -127
- hishel/_core/_storages/_async_base.py +71 -0
- hishel/_core/{_async/_storages/_sqlite.py → _storages/_async_sqlite.py} +95 -132
- hishel/_core/_storages/_packing.py +144 -0
- hishel/_core/_storages/_sync_base.py +71 -0
- hishel/_core/{_sync/_storages/_sqlite.py → _storages/_sync_sqlite.py} +95 -132
- hishel/_core/models.py +13 -26
- hishel/_sync_cache.py +50 -37
- hishel/_sync_httpx.py +243 -0
- hishel/_utils.py +48 -137
- hishel/asgi.py +400 -0
- hishel/fastapi.py +263 -0
- hishel/httpx.py +3 -326
- hishel/requests.py +25 -17
- {hishel-1.0.0.dev1.dist-info → hishel-1.0.0.dev3.dist-info}/METADATA +139 -27
- hishel-1.0.0.dev3.dist-info/RECORD +23 -0
- hishel/_core/__init__.py +0 -59
- hishel/_core/_base/_storages/_base.py +0 -272
- hishel/_core/_base/_storages/_packing.py +0 -165
- hishel-1.0.0.dev1.dist-info/RECORD +0 -19
- {hishel-1.0.0.dev1.dist-info → hishel-1.0.0.dev3.dist-info}/WHEEL +0 -0
- {hishel-1.0.0.dev1.dist-info → hishel-1.0.0.dev3.dist-info}/licenses/LICENSE +0 -0
hishel/_core/_storages/_async_base.py (new file)

@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+import abc
+import time
+import typing as tp
+import uuid
+
+from ..models import Entry, Request, Response
+
+
+class AsyncBaseStorage(abc.ABC):
+    @abc.abstractmethod
+    async def create_entry(self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None) -> Entry:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    async def get_entries(self, key: str) -> tp.List[Entry]:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    async def update_entry(
+        self,
+        id: uuid.UUID,
+        new_entry: tp.Union[Entry, tp.Callable[[Entry], Entry]],
+    ) -> tp.Optional[Entry]:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    async def remove_entry(self, id: uuid.UUID) -> None:
+        raise NotImplementedError()
+
+    async def close(self) -> None:
+        pass
+
+    def is_soft_deleted(self, pair: Entry) -> bool:
+        """
+        Check if a pair is soft deleted based on its metadata.
+
+        Args:
+            pair: The request pair to check.
+
+        Returns:
+            True if the pair is soft deleted, False otherwise.
+        """
+        return pair.meta.deleted_at is not None and pair.meta.deleted_at > 0
+
+    def is_safe_to_hard_delete(self, pair: Entry) -> bool:
+        """
+        Check if a pair is safe to hard delete based on its metadata.
+
+        If the pair has been soft deleted for more than 1 hour, it is considered safe to hard delete.
+
+        Args:
+            pair: The request pair to check.
+
+        Returns:
+            True if the pair is safe to hard delete, False otherwise.
+        """
+        return bool(pair.meta.deleted_at is not None and (pair.meta.deleted_at + 3600 < time.time()))
+
+    def mark_pair_as_deleted(self, pair: Entry) -> Entry:
+        """
+        Mark a pair as soft deleted by setting its deleted_at timestamp.
+
+        Args:
+            pair: The request pair to mark as deleted.
+        Returns:
+            The updated request pair with the deleted_at timestamp set.
+        """
+        pair.meta.deleted_at = time.time()
+        return pair
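The interface above is the whole storage contract in 1.0.0.dev3: one atomic create_entry call replaces the split write path of dev1 (the removed add_response step appears in the next file), and soft-delete bookkeeping lives on the base class as concrete helpers. As an illustration of the contract, here is a minimal in-memory implementation sketch; everything beyond the abstract signatures and the Entry/EntryMeta fields visible in this diff is an assumption, not hishel API.

from __future__ import annotations

import time
import typing as tp
import uuid

from hishel._core._storages._async_base import AsyncBaseStorage
from hishel._core.models import Entry, EntryMeta, Request, Response


class InMemoryStorage(AsyncBaseStorage):
    """Illustrative dict-backed storage; no persistence, streams pass through unchanged."""

    def __init__(self) -> None:
        self._entries: tp.Dict[uuid.UUID, Entry] = {}

    async def create_entry(
        self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None
    ) -> Entry:
        entry = Entry(
            id=id_ if id_ is not None else uuid.uuid4(),
            request=request,
            response=response,
            meta=EntryMeta(created_at=time.time()),
            cache_key=key.encode("utf-8"),
        )
        self._entries[entry.id] = entry
        return entry

    async def get_entries(self, key: str) -> tp.List[Entry]:
        key_bytes = key.encode("utf-8")
        return [
            e
            for e in self._entries.values()
            if e.cache_key == key_bytes and not self.is_soft_deleted(e)
        ]

    async def update_entry(
        self,
        id: uuid.UUID,
        new_entry: tp.Union[Entry, tp.Callable[[Entry], Entry]],
    ) -> tp.Optional[Entry]:
        current = self._entries.get(id)
        if current is None:
            return None
        updated = new_entry if isinstance(new_entry, Entry) else new_entry(current)
        self._entries[id] = updated
        return updated

    async def remove_entry(self, id: uuid.UUID) -> None:
        # Soft-delete via the base-class helper, mirroring the SQLite backend.
        if id in self._entries:
            self._entries[id] = self.mark_pair_as_deleted(self._entries[id])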
hishel/_core/{_async/_storages/_sqlite.py → _storages/_async_sqlite.py}

@@ -9,21 +9,19 @@ from typing import (
     AsyncIterator,
     Callable,
     List,
-    Literal,
     Optional,
     Union,
 )

-from hishel._core.
-from hishel._core.
+from hishel._core._storages._async_base import AsyncBaseStorage
+from hishel._core._storages._packing import pack, unpack
 from hishel._core.models import (
-
-
-    Pair,
-    PairMeta,
+    Entry,
+    EntryMeta,
     Request,
     Response,
 )
+from hishel._utils import ensure_cache_dict

 # Batch cleanup configuration
 # How often to run cleanup (seconds). Default: 1 hour.

@@ -38,7 +36,6 @@ try:
     import anysqlite

     class AsyncSqliteStorage(AsyncBaseStorage):
-        _STREAM_KIND = {"request": 0, "response": 1}
         _COMPLETE_CHUNK_NUMBER = -1

         def __init__(

@@ -85,14 +82,13 @@ try:
             )
             """)

-            # Table for storing stream chunks
+            # Table for storing response stream chunks only
             await cursor.execute("""
                 CREATE TABLE IF NOT EXISTS streams (
                     entry_id BLOB NOT NULL,
-                    kind INTEGER NOT NULL,
                     chunk_number INTEGER NOT NULL,
                     chunk_data BLOB NOT NULL,
-                    PRIMARY KEY (entry_id,
+                    PRIMARY KEY (entry_id, chunk_number),
                     FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
                 )
             """)

@@ -100,85 +96,48 @@ try:
             # Indexes for performance
             await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
             await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
-            # Note: PRIMARY KEY (entry_id, kind, chunk_number) already provides an index
-            # for queries like: entry_id = ? AND kind = ? AND chunk_number = ?

             await self.connection.commit()

-        async def
-            self,
-
-
-        ) -> IncompletePair:
-            pair_id = id if id is not None else uuid.uuid4()
-            pair_meta = PairMeta(
-                created_at=time.time(),
-            )
-
-            pair = IncompletePair(id=pair_id, request=request, meta=pair_meta)
-
-            packed_pair = pack(pair, kind="pair")
+        async def create_entry(
+            self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None
+        ) -> Entry:
+            key_bytes = key.encode("utf-8")

             connection = await self._ensure_connection()
             cursor = await connection.cursor()
-            await cursor.execute(
-                "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
-                (pair_id.bytes, None, packed_pair, pair_meta.created_at, None),
-            )
-            await connection.commit()
-
-            assert isinstance(request.stream, AsyncIterable), "Request stream must be an AsyncIterable, not Iterable"

-            request
-
-
-
-                metadata=request.metadata,
-                stream=self._save_stream(request.stream, pair_id.bytes, "request"),
+            # Create a new entry directly with both request and response
+            pair_id = id_ if id_ is not None else uuid.uuid4()
+            pair_meta = EntryMeta(
+                created_at=time.time(),
             )

-            return replace(pair, request=request)
-
-        async def add_response(
-            self,
-            pair_id: uuid.UUID,
-            response: Response,
-            key: str | bytes,
-        ) -> CompletePair:
-            if isinstance(key, str):
-                key = key.encode("utf-8")
-
-            connection = await self._ensure_connection()
-            cursor = await connection.cursor()
-
-            # Get the existing pair
-            await cursor.execute("SELECT data FROM entries WHERE id = ?", (pair_id.bytes,))
-            result = await cursor.fetchone()
-
-            if result is None:
-                raise ValueError(f"Entry with ID {pair_id} not found.")
-
-            pair = unpack(result[0], kind="pair")
-
             assert isinstance(response.stream, (AsyncIterator, AsyncIterable))
-
+            response_with_stream = replace(
+                response,
+                stream=self._save_stream(response.stream, pair_id.bytes),
+            )

-
-
-
+            complete_entry = Entry(
+                id=pair_id,
+                request=request,
+                response=response_with_stream,
+                meta=pair_meta,
+                cache_key=key_bytes,
             )

-            #
+            # Insert the complete entry into the database
             await cursor.execute(
-                "
-                (pack(
+                "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
+                (pair_id.bytes, key_bytes, pack(complete_entry, kind="pair"), pair_meta.created_at, None),
             )
             await connection.commit()

-            return
+            return complete_entry

-        async def
-            final_pairs: List[
+        async def get_entries(self, key: str) -> List[Entry]:
+            final_pairs: List[Entry] = []

             now = time.time()
             if now - self.last_cleanup >= BATCH_CLEANUP_INTERVAL:
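Together, create_entry and get_entries form the dev3 write/read cycle: one entries row per exchange, with the response body teed into the streams table as the caller consumes it. A hedged usage sketch follows; the AsyncSqliteStorage constructor is not shown in this diff, so the sketch is typed against AsyncBaseStorage, and the Headers/Request/Response constructor inputs are assumptions inferred from the fields pack() and unpack() touch.

import typing as tp

from hishel._core._headers import Headers
from hishel._core._storages._async_base import AsyncBaseStorage
from hishel._core.models import Request, Response


async def body() -> tp.AsyncIterator[bytes]:
    yield b'{"ok": true}'


async def store_exchange(storage: AsyncBaseStorage) -> None:
    request = Request(
        method="GET",
        url="https://example.com/data",
        headers=Headers({}),  # constructor input is an assumption
        metadata={},
        stream=iter([]),      # request bodies are no longer persisted in dev3
    )
    response = Response(
        status_code=200,
        headers=Headers({}),  # constructor input is an assumption
        metadata={},
        stream=body(),        # create_entry asserts this is async-iterable
    )
    entry = await storage.create_entry(request, response, key="GET example.com/data")

    # With the SQLite backend, draining the returned stream persists the body
    # chunk by chunk and finally writes the completion marker.
    async for _ in entry.response.stream:
        pass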
@@ -191,39 +150,40 @@ try:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             # Query entries directly by cache_key
-            await cursor.execute(
+            await cursor.execute(
+                "SELECT id, data FROM entries WHERE cache_key = ?",
+                (key.encode("utf-8"),),
+            )

             for row in await cursor.fetchall():
                 pair_data = unpack(row[1], kind="pair")

-
+                # Skip entries without a response (incomplete)
+                if not isinstance(pair_data, Entry) or pair_data.response is None:
                     continue

                 final_pairs.append(pair_data)

-            pairs_with_streams: List[
+            pairs_with_streams: List[Entry] = []

+            # Only restore response streams from cache
             for pair in final_pairs:
                 pairs_with_streams.append(
                     replace(
                         pair,
                         response=replace(
                             pair.response,
-                            stream=self._stream_data_from_cache(pair.id.bytes
-                        ),
-                        request=replace(
-                            pair.request,
-                            stream=self._stream_data_from_cache(pair.id.bytes, "request"),
+                            stream=self._stream_data_from_cache(pair.id.bytes),
                         ),
                     )
                 )
             return pairs_with_streams

-        async def
+        async def update_entry(
             self,
             id: uuid.UUID,
-            new_pair: Union[
-        ) -> Optional[
+            new_pair: Union[Entry, Callable[[Entry], Entry]],
+        ) -> Optional[Entry]:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))

@@ -234,10 +194,11 @@ try:

             pair = unpack(result[0], kind="pair")

-
+            # Skip entries without a response (incomplete)
+            if not isinstance(pair, Entry) or pair.response is None:
                 return None

-            if isinstance(new_pair,
+            if isinstance(new_pair, Entry):
                 complete_pair = new_pair
             else:
                 complete_pair = new_pair(pair)

@@ -246,7 +207,8 @@ try:
                 raise ValueError("Pair ID mismatch")

             await cursor.execute(
-                "UPDATE entries SET data = ? WHERE id = ?",
+                "UPDATE entries SET data = ? WHERE id = ?",
+                (pack(complete_pair, kind="pair"), id.bytes),
             )

             if pair.cache_key != complete_pair.cache_key:

@@ -259,7 +221,7 @@ try:

             return complete_pair

-        async def
+        async def remove_entry(self, id: uuid.UUID) -> None:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))

@@ -272,28 +234,33 @@ try:
             await self._soft_delete_pair(pair, cursor)
             await connection.commit()

-        async def _is_stream_complete(
-
-        ) -> bool:
-            kind_id = self._STREAM_KIND[kind]
-            # Check if there's a completion marker (chunk_number = -1)
+        async def _is_stream_complete(self, pair_id: uuid.UUID, cursor: anysqlite.Cursor) -> bool:
+            # Check if there's a completion marker (chunk_number = -1) for response stream
             await cursor.execute(
-                "SELECT 1 FROM streams WHERE entry_id = ? AND
-                (pair_id.bytes,
+                "SELECT 1 FROM streams WHERE entry_id = ? AND chunk_number = ? LIMIT 1",
+                (pair_id.bytes, self._COMPLETE_CHUNK_NUMBER),
             )
             return await cursor.fetchone() is not None

-        async def _soft_delete_pair(
+        async def _soft_delete_pair(
+            self,
+            pair: Entry,
+            cursor: anysqlite.Cursor,
+        ) -> None:
             """
             Mark the pair as deleted by setting the deleted_at timestamp.
             """
             marked_pair = self.mark_pair_as_deleted(pair)
             await cursor.execute(
                 "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
-                (
+                (
+                    pack(marked_pair, kind="pair"),
+                    marked_pair.meta.deleted_at,
+                    pair.id.bytes,
+                ),
             )

-        async def _is_pair_expired(self, pair:
+        async def _is_pair_expired(self, pair: Entry, cursor: anysqlite.Cursor) -> bool:
             """
             Check if the pair is expired.
             """

@@ -307,10 +274,10 @@ try:
             self,
         ) -> None:
             """
-            Cleanup expired
+            Cleanup expired entries in the database.
             """
-            should_mark_as_deleted: List[
-            should_hard_delete: List[
+            should_mark_as_deleted: List[Entry] = []
+            should_hard_delete: List[Entry] = []

             connection = await self._ensure_connection()
             cursor = await connection.cursor()

@@ -319,7 +286,10 @@ try:
             chunk_size = BATCH_CLEANUP_CHUNK_SIZE
             offset = 0
             while True:
-                await cursor.execute(
+                await cursor.execute(
+                    "SELECT id, data FROM entries LIMIT ? OFFSET ?",
+                    (chunk_size, offset),
+                )
                 rows = await cursor.fetchall()
                 if not rows:
                     break

@@ -350,61 +320,56 @@ try:

             await connection.commit()

-        async def _is_corrupted(self, pair:
-            # if
-            if pair.meta.created_at + 3600 < time.time() and
+        async def _is_corrupted(self, pair: Entry, cursor: anysqlite.Cursor) -> bool:
+            # if entry was created more than 1 hour ago and still has no response (incomplete)
+            if pair.meta.created_at + 3600 < time.time() and pair.response is None:
                 return True

-            if
+            # Check if response stream is complete for Entry with response
+            if (
+                isinstance(pair, Entry)
+                and pair.response is not None
+                and not await self._is_stream_complete(pair.id, cursor)
+            ):
                 return True
             return False

-        async def _hard_delete_pair(self, pair:
+        async def _hard_delete_pair(self, pair: Entry, cursor: anysqlite.Cursor) -> None:
             """
             Permanently delete the pair from the database.
             """
             await cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))

-            # Delete
+            # Delete response stream for this entry
             await self._delete_stream(pair.id.bytes, cursor)

         async def _delete_stream(
             self,
             entry_id: bytes,
             cursor: anysqlite.Cursor,
-            type: Literal["request", "response", "all"] = "all",
         ) -> None:
             """
-            Delete
+            Delete response stream associated with the given entry ID.
             """
-
-            await cursor.execute(
-                "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["request"])
-            )
-            elif type == "response":
-                await cursor.execute(
-                    "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["response"])
-                )
-            elif type == "all":
-                await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
+            await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))

         async def _save_stream(
             self,
             stream: AsyncIterator[bytes],
             entry_id: bytes,
-            kind: Literal["response", "request"],
         ) -> AsyncIterator[bytes]:
             """
-            Wrapper around an async iterator that also saves the data to the cache in chunks.
+            Wrapper around an async iterator that also saves the response data to the cache in chunks.
             """
-            kind_id = self._STREAM_KIND[kind]
             chunk_number = 0
+            content_length = 0
             async for chunk in stream:
+                content_length += len(chunk)
                 connection = await self._ensure_connection()
                 cursor = await connection.cursor()
                 await cursor.execute(
-                    "INSERT INTO streams (entry_id,
-                    (entry_id,
+                    "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                    (entry_id, chunk_number, chunk),
                 )
                 await connection.commit()
                 chunk_number += 1
@@ -414,28 +379,26 @@ try:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             await cursor.execute(
-                "INSERT INTO streams (entry_id,
-                (entry_id,
+                "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
             )
             await connection.commit()

         async def _stream_data_from_cache(
             self,
             entry_id: bytes,
-            kind: Literal["response", "request"],
         ) -> AsyncIterator[bytes]:
             """
-            Get an async iterator that yields the stream data from the cache.
+            Get an async iterator that yields the response stream data from the cache.
             """
-            kind_id = self._STREAM_KIND[kind]
             chunk_number = 0

             connection = await self._ensure_connection()
             while True:
                 cursor = await connection.cursor()
                 await cursor.execute(
-                    "SELECT chunk_data FROM streams WHERE entry_id = ? AND
-                    (entry_id,
+                    "SELECT chunk_data FROM streams WHERE entry_id = ? AND chunk_number = ?",
+                    (entry_id, chunk_number),
                 )
                 result = await cursor.fetchone()

@@ -449,7 +412,7 @@ try:
                 chunk_number += 1
 except ImportError:

-    class AsyncSqliteStorage
+    class AsyncSqliteStorage:  # type: ignore[no-redef]
         def __init__(self, *args: Any, **kwargs: Any) -> None:
             raise ImportError(
                 "The 'anysqlite' library is required to use the `AsyncSqliteStorage` integration. "
hishel/_core/_storages/_packing.py (new file)

@@ -0,0 +1,144 @@
+from __future__ import annotations
+
+import uuid
+from typing import TYPE_CHECKING, Any, Mapping, Optional, Union, overload
+
+import msgpack
+from typing_extensions import Literal, cast
+
+from hishel._core._headers import Headers
+from hishel._core.models import EntryMeta, Request, Response
+
+
+def filter_out_hishel_metadata(data: Mapping[str, Any]) -> dict[str, Any]:
+    return {k: v for k, v in data.items() if not k.startswith("hishel_")}
+
+
+if TYPE_CHECKING:
+    from hishel import Entry
+
+
+@overload
+def pack(
+    value: "Entry",
+    /,
+    kind: Literal["pair"],
+) -> bytes: ...
+
+
+@overload
+def pack(
+    value: uuid.UUID,
+    /,
+    kind: Literal["entry_db_key_index"],
+) -> bytes: ...
+
+
+def pack(
+    value: Union["Entry", uuid.UUID],
+    /,
+    kind: Literal["pair", "entry_db_key_index"],
+) -> bytes:
+    from hishel import Entry
+
+    if kind == "entry_db_key_index":
+        assert isinstance(value, uuid.UUID)
+        return value.bytes
+    elif kind == "pair":
+        assert isinstance(value, Entry)
+        return cast(
+            bytes,
+            msgpack.packb(
+                {
+                    "id": value.id.bytes,
+                    "request": {
+                        "method": value.request.method,
+                        "url": value.request.url,
+                        "headers": value.request.headers._headers,
+                        "extra": filter_out_hishel_metadata(value.request.metadata),
+                    },
+                    "response": {
+                        "status_code": value.response.status_code,
+                        "headers": value.response.headers._headers,
+                        "extra": filter_out_hishel_metadata(value.response.metadata),
+                    },
+                    "meta": {
+                        "created_at": value.meta.created_at,
+                        "deleted_at": value.meta.deleted_at,
+                    },
+                    "cache_key": value.cache_key,
+                }
+            ),
+        )
+    assert False, f"Unexpected kind: {kind}"
+
+
+@overload
+def unpack(
+    value: bytes,
+    /,
+    kind: Literal["pair"],
+) -> "Entry": ...
+
+
+@overload
+def unpack(
+    value: bytes,
+    /,
+    kind: Literal["entry_db_key_index"],
+) -> uuid.UUID: ...
+
+
+@overload
+def unpack(
+    value: Optional[bytes],
+    /,
+    kind: Literal["pair"],
+) -> Optional["Entry"]: ...
+
+
+@overload
+def unpack(
+    value: Optional[bytes],
+    /,
+    kind: Literal["entry_db_key_index"],
+) -> Optional[uuid.UUID]: ...
+
+
+def unpack(
+    value: Optional[bytes],
+    /,
+    kind: Literal["pair", "entry_db_key_index"],
+) -> Union["Entry", uuid.UUID, None]:
+    from hishel import Entry
+
+    if value is None:
+        return None
+    if kind == "entry_db_key_index":
+        return uuid.UUID(bytes=value)
+    elif kind == "pair":
+        data = msgpack.unpackb(value)
+        id = uuid.UUID(bytes=data["id"])
+        return Entry(
+            id=id,
+            request=Request(
+                method=data["request"]["method"],
+                url=data["request"]["url"],
+                headers=Headers(data["request"]["headers"]),
+                metadata=data["request"]["extra"],
+                stream=iter([]),
+            ),
+            response=(
+                Response(
+                    status_code=data["response"]["status_code"],
+                    headers=Headers(data["response"]["headers"]),
+                    metadata=data["response"]["extra"],
+                    stream=iter([]),
+                )
+            ),
+            meta=EntryMeta(
+                created_at=data["meta"]["created_at"],
+                deleted_at=data["meta"]["deleted_at"],
+            ),
+            cache_key=data["cache_key"],
+        )