hishel 0.1.5__py3-none-any.whl → 1.0.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. hishel/__init__.py +55 -53
  2. hishel/{beta/_async_cache.py → _async_cache.py} +3 -3
  3. hishel/{beta → _core}/__init__.py +6 -6
  4. hishel/_core/_async/_storages/_sqlite.py +457 -0
  5. hishel/{beta/_core → _core}/_base/_storages/_base.py +1 -1
  6. hishel/{beta/_core → _core}/_base/_storages/_packing.py +5 -5
  7. hishel/{beta/_core → _core}/_spec.py +89 -2
  8. hishel/_core/_sync/_storages/_sqlite.py +457 -0
  9. hishel/{beta/_core → _core}/models.py +1 -1
  10. hishel/{beta/_sync_cache.py → _sync_cache.py} +3 -3
  11. hishel/_utils.py +1 -241
  12. hishel/{beta/httpx.py → httpx.py} +15 -8
  13. hishel/{beta/requests.py → requests.py} +5 -5
  14. hishel-1.0.0.dev1.dist-info/METADATA +298 -0
  15. hishel-1.0.0.dev1.dist-info/RECORD +19 -0
  16. hishel/_async/__init__.py +0 -5
  17. hishel/_async/_client.py +0 -30
  18. hishel/_async/_mock.py +0 -43
  19. hishel/_async/_pool.py +0 -201
  20. hishel/_async/_storages.py +0 -768
  21. hishel/_async/_transports.py +0 -282
  22. hishel/_controller.py +0 -581
  23. hishel/_exceptions.py +0 -10
  24. hishel/_files.py +0 -54
  25. hishel/_headers.py +0 -215
  26. hishel/_lfu_cache.py +0 -71
  27. hishel/_lmdb_types_.pyi +0 -53
  28. hishel/_s3.py +0 -122
  29. hishel/_serializers.py +0 -329
  30. hishel/_sync/__init__.py +0 -5
  31. hishel/_sync/_client.py +0 -30
  32. hishel/_sync/_mock.py +0 -43
  33. hishel/_sync/_pool.py +0 -201
  34. hishel/_sync/_storages.py +0 -768
  35. hishel/_sync/_transports.py +0 -282
  36. hishel/_synchronization.py +0 -37
  37. hishel/beta/_core/__init__.py +0 -0
  38. hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
  39. hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
  40. hishel-0.1.5.dist-info/METADATA +0 -258
  41. hishel-0.1.5.dist-info/RECORD +0 -41
  42. hishel/{beta/_core → _core}/_headers.py +0 -0
  43. {hishel-0.1.5.dist-info → hishel-1.0.0.dev1.dist-info}/WHEEL +0 -0
  44. {hishel-0.1.5.dist-info → hishel-1.0.0.dev1.dist-info}/licenses/LICENSE +0 -0
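
The headline change in 1.0.0.dev1 is the promotion of the `hishel.beta` package to the top level: everything that lived under `hishel.beta` now ships as `hishel` itself, and the legacy 0.x implementation (controllers, serializers, and the `_async`/`_sync` transports, pools, and storages) is removed outright. A minimal migration sketch for code that was on the beta API, using only names that appear in the hunks below:

# Before (hishel 0.1.5):
# from hishel.beta import CompletePair, Request, Response

# After (hishel 1.0.0.dev1):
from hishel import CompletePair, Request, Response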
hishel/{beta/_core → _core}/_spec.py
@@ -15,11 +15,11 @@ from typing import (
     Union,
 )
 
+from hishel._core._headers import Headers, Range, Vary, parse_cache_control
 from hishel._utils import parse_date, partition
-from hishel.beta._core._headers import Headers, Range, Vary, parse_cache_control
 
 if TYPE_CHECKING:
-    from hishel.beta import CompletePair, Request, Response
+    from hishel import CompletePair, Request, Response
 
 
 TState = TypeVar("TState", bound="State")
@@ -41,9 +41,96 @@ logger = logging.getLogger("hishel.core.spec")
 
 @dataclass
 class CacheOptions:
+    """
+    Configuration options for HTTP cache behavior.
+
+    These options control how the cache interprets and applies RFC 9111 caching rules.
+    All options have sensible defaults that follow the specification.
+
+    Attributes:
+    ----------
+    shared : bool
+        Determines whether the cache operates as a shared cache or private cache.
+
+        RFC 9111 Section 3.5: Authenticated Responses
+        https://www.rfc-editor.org/rfc/rfc9111.html#section-3.5
+
+        - Shared cache (True): Acts as a proxy, CDN, or gateway cache serving multiple users.
+          Must respect private directives and Authorization header restrictions.
+          Can use the s-maxage directive instead of max-age for shared-specific freshness.
+
+        - Private cache (False): Acts as a browser or user-agent cache for a single user.
+          Can cache private responses and ignore s-maxage directives.
+
+        Default: True (shared cache)
+
+        Examples:
+        --------
+        >>> # Shared cache (proxy/CDN)
+        >>> options = CacheOptions(shared=True)
+
+        >>> # Private cache (browser)
+        >>> options = CacheOptions(shared=False)
+
+    supported_methods : list[str]
+        HTTP methods that are allowed to be cached by this cache implementation.
+
+        RFC 9111 Section 3, paragraph 2.1:
+        https://www.rfc-editor.org/rfc/rfc9111.html#section-3-2.1.1
+
+        "A cache MUST NOT store a response to a request unless:
+        - the request method is understood by the cache"
+
+        Default: ["GET", "HEAD"] (the most commonly cached methods)
+
+        Examples:
+        --------
+        >>> # Default: cache GET and HEAD only
+        >>> options = CacheOptions()
+        >>> options.supported_methods
+        ['GET', 'HEAD']
+
+        >>> # Cache POST responses (advanced use case)
+        >>> options = CacheOptions(supported_methods=["GET", "HEAD", "POST"])
+
+    allow_stale : bool
+        Controls whether stale responses can be served without revalidation.
+
+        RFC 9111 Section 4.2.4: Serving Stale Responses
+        https://www.rfc-editor.org/rfc/rfc9111.html#section-4.2.4
+
+        "A cache MUST NOT generate a stale response unless it is disconnected or
+        doing so is explicitly permitted by the client or origin server (e.g., by
+        the max-stale request directive in Section 5.2.1, extension directives
+        such as those defined in [RFC5861], or configuration in accordance with
+        an out-of-band contract)."
+
+        Default: False (no stale responses)
+
+        Examples:
+        --------
+        >>> # Conservative: never serve stale
+        >>> options = CacheOptions(allow_stale=False)
+
+        >>> # Permissive: serve stale when allowed
+        >>> options = CacheOptions(allow_stale=True)
+
+        >>> # Stale-while-revalidate pattern (RFC 5861)
+        >>> # Even with allow_stale=True, directives are respected
+        >>> options = CacheOptions(allow_stale=True)
+    """
+
     shared: bool = True
+    """
+    When True, the cache operates as a shared cache (proxy/CDN).
+    When False, as a private cache (browser).
+    """
+
     supported_methods: list[str] = field(default_factory=lambda: ["GET", "HEAD"])
+    """HTTP methods that are allowed to be cached."""
+
     allow_stale: bool = False
+    """When True, stale responses can be served without revalidation."""
 
 
 @dataclass
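
Taken together, the new docstrings make `CacheOptions` self-describing. A short combined sketch (a hypothetical configuration; the import path follows this file, `hishel/_core/_spec.py`, though the package may also re-export the name at the top level):

from hishel._core._spec import CacheOptions

# Browser-style private cache that may serve stale responses and,
# as an advanced opt-in, caches POST as well.
options = CacheOptions(
    shared=False,
    supported_methods=["GET", "HEAD", "POST"],
    allow_stale=True,
)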
hishel/_core/_sync/_storages/_sqlite.py
@@ -0,0 +1,457 @@
+from __future__ import annotations
+
+import time
+import uuid
+from dataclasses import replace
+from typing import (
+    Any,
+    Iterable,
+    Iterator,
+    Callable,
+    List,
+    Literal,
+    Optional,
+    Union,
+)
+
+from hishel._core._base._storages._base import SyncBaseStorage, ensure_cache_dict
+from hishel._core._base._storages._packing import pack, unpack
+from hishel._core.models import (
+    CompletePair,
+    IncompletePair,
+    Pair,
+    PairMeta,
+    Request,
+    Response,
+)
+
+# Batch cleanup configuration
+# How often to run cleanup (seconds). Default: 1 hour.
+BATCH_CLEANUP_INTERVAL = 3600
+# How long to wait after storage creation before allowing the first cleanup (seconds)
+BATCH_CLEANUP_START_DELAY = 5 * 60
+# Number of rows to process per chunk when cleaning
+BATCH_CLEANUP_CHUNK_SIZE = 200
+
+
+try:
+    import sqlite3
+
+    class SyncSqliteStorage(SyncBaseStorage):
+        _STREAM_KIND = {"request": 0, "response": 1}
+        _COMPLETE_CHUNK_NUMBER = -1
+
+        def __init__(
+            self,
+            *,
+            connection: Optional[sqlite3.Connection] = None,
+            database_path: str = "hishel_cache.db",
+            default_ttl: Optional[float] = None,
+            refresh_ttl_on_access: bool = True,
+        ) -> None:
+            base_path = ensure_cache_dict()
+
+            self.connection = connection
+            self.database_path = base_path / database_path
+            self.default_ttl = default_ttl
+            self.refresh_ttl_on_access = refresh_ttl_on_access
+            self.last_cleanup = time.time() - BATCH_CLEANUP_INTERVAL + BATCH_CLEANUP_START_DELAY
+            # When this storage instance was created. Used to delay the first cleanup.
+            self._start_time = time.time()
+            self._initialized = False
+
+        def _ensure_connection(self) -> sqlite3.Connection:
+            """Ensure connection is established and database is initialized."""
+            if self.connection is None:
+                self.connection = sqlite3.connect(str(self.database_path))
+            if not self._initialized:
+                self._initialize_database()
+                self._initialized = True
+            return self.connection
+
+        def _initialize_database(self) -> None:
+            """Initialize the database schema."""
+            assert self.connection is not None
+            cursor = self.connection.cursor()
+
+            # Table for storing request/response pairs
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS entries (
+                    id BLOB PRIMARY KEY,
+                    cache_key BLOB,
+                    data BLOB NOT NULL,
+                    created_at REAL NOT NULL,
+                    deleted_at REAL
+                )
+            """)
+
+            # Table for storing stream chunks
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS streams (
+                    entry_id BLOB NOT NULL,
+                    kind INTEGER NOT NULL,
+                    chunk_number INTEGER NOT NULL,
+                    chunk_data BLOB NOT NULL,
+                    PRIMARY KEY (entry_id, kind, chunk_number),
+                    FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
+                )
+            """)
+
+            # Indexes for performance
+            cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
+            cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
+            # Note: PRIMARY KEY (entry_id, kind, chunk_number) already provides an index
+            # for queries like: entry_id = ? AND kind = ? AND chunk_number = ?
+
+            self.connection.commit()
+
+        def create_pair(
+            self,
+            request: Request,
+            id: uuid.UUID | None = None,
+        ) -> IncompletePair:
+            pair_id = id if id is not None else uuid.uuid4()
+            pair_meta = PairMeta(
+                created_at=time.time(),
+            )
+
+            pair = IncompletePair(id=pair_id, request=request, meta=pair_meta)
+
+            packed_pair = pack(pair, kind="pair")
+
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+            cursor.execute(
+                "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
+                (pair_id.bytes, None, packed_pair, pair_meta.created_at, None),
+            )
+            connection.commit()
+
+            assert isinstance(request.stream, Iterable), "Request stream must be an Iterable, not an AsyncIterable"
+
+            request = Request(
+                method=request.method,
+                url=request.url,
+                headers=request.headers,
+                metadata=request.metadata,
+                stream=self._save_stream(request.stream, pair_id.bytes, "request"),
+            )
+
+            return replace(pair, request=request)
+
+        def add_response(
+            self,
+            pair_id: uuid.UUID,
+            response: Response,
+            key: str | bytes,
+        ) -> CompletePair:
+            if isinstance(key, str):
+                key = key.encode("utf-8")
+
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+
+            # Get the existing pair
+            cursor.execute("SELECT data FROM entries WHERE id = ?", (pair_id.bytes,))
+            result = cursor.fetchone()
+
+            if result is None:
+                raise ValueError(f"Entry with ID {pair_id} not found.")
+
+            pair = unpack(result[0], kind="pair")
+
+            assert isinstance(response.stream, (Iterator, Iterable))
+            response = replace(response, stream=self._save_stream(response.stream, pair_id.bytes, "response"))
+
+            self._delete_stream(pair.id.bytes, cursor, type="response")
+            complete_pair = CompletePair(
+                id=pair.id, request=pair.request, response=response, meta=pair.meta, cache_key=key
+            )
+
+            # Update the entry with the complete pair and set cache_key
+            cursor.execute(
+                "UPDATE entries SET data = ?, cache_key = ? WHERE id = ?",
+                (pack(complete_pair, kind="pair"), key, pair_id.bytes),
+            )
+            connection.commit()
+
+            return complete_pair
+
+        def get_pairs(self, key: str) -> List[CompletePair]:
+            final_pairs: List[CompletePair] = []
+
+            now = time.time()
+            if now - self.last_cleanup >= BATCH_CLEANUP_INTERVAL:
+                try:
+                    self._batch_cleanup()
+                except Exception:
+                    # don't let cleanup prevent reads; failures are non-fatal
+                    pass
+
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+            # Query entries directly by cache_key
+            cursor.execute("SELECT id, data FROM entries WHERE cache_key = ?", (key.encode("utf-8"),))
+
+            for row in cursor.fetchall():
+                pair_data = unpack(row[1], kind="pair")
+
+                if isinstance(pair_data, IncompletePair):
+                    continue
+
+                final_pairs.append(pair_data)
+
+            pairs_with_streams: List[CompletePair] = []
+
+            for pair in final_pairs:
+                pairs_with_streams.append(
+                    replace(
+                        pair,
+                        response=replace(
+                            pair.response,
+                            stream=self._stream_data_from_cache(pair.id.bytes, "response"),
+                        ),
+                        request=replace(
+                            pair.request,
+                            stream=self._stream_data_from_cache(pair.id.bytes, "request"),
+                        ),
+                    )
+                )
+            return pairs_with_streams
+
+        def update_pair(
+            self,
+            id: uuid.UUID,
+            new_pair: Union[CompletePair, Callable[[CompletePair], CompletePair]],
+        ) -> Optional[CompletePair]:
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+            cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+            result = cursor.fetchone()
+
+            if result is None:
+                return None
+
+            pair = unpack(result[0], kind="pair")
+
+            if isinstance(pair, IncompletePair):
+                return None
+
+            if isinstance(new_pair, CompletePair):
+                complete_pair = new_pair
+            else:
+                complete_pair = new_pair(pair)
+
+            if pair.id != complete_pair.id:
+                raise ValueError("Pair ID mismatch")
+
+            cursor.execute(
+                "UPDATE entries SET data = ? WHERE id = ?", (pack(complete_pair, kind="pair"), id.bytes)
+            )
+
+            if pair.cache_key != complete_pair.cache_key:
+                cursor.execute(
+                    "UPDATE entries SET cache_key = ? WHERE id = ?",
+                    (complete_pair.cache_key, complete_pair.id.bytes),
+                )
+
+            connection.commit()
+
+            return complete_pair
+
+        def remove(self, id: uuid.UUID) -> None:
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+            cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+            result = cursor.fetchone()
+
+            if result is None:
+                return None
+
+            pair = unpack(result[0], kind="pair")
+            self._soft_delete_pair(pair, cursor)
+            connection.commit()
+
+        def _is_stream_complete(
+            self, kind: Literal["request", "response"], pair_id: uuid.UUID, cursor: sqlite3.Cursor
+        ) -> bool:
+            kind_id = self._STREAM_KIND[kind]
+            # Check if there's a completion marker (chunk_number = -1)
+            cursor.execute(
+                "SELECT 1 FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ? LIMIT 1",
+                (pair_id.bytes, kind_id, self._COMPLETE_CHUNK_NUMBER),
+            )
+            return cursor.fetchone() is not None
+
+        def _soft_delete_pair(self, pair: Union[CompletePair, IncompletePair], cursor: sqlite3.Cursor) -> None:
+            """
+            Mark the pair as deleted by setting the deleted_at timestamp.
+            """
+            marked_pair = self.mark_pair_as_deleted(pair)
+            cursor.execute(
+                "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
+                (pack(marked_pair, kind="pair"), marked_pair.meta.deleted_at, pair.id.bytes),
+            )
+
+        def _is_pair_expired(self, pair: Pair, cursor: sqlite3.Cursor) -> bool:
+            """
+            Check if the pair is expired.
+            """
+            ttl = pair.request.metadata["hishel_ttl"] if "hishel_ttl" in pair.request.metadata else self.default_ttl
+            created_at = pair.meta.created_at
+            if ttl is None:
+                return False
+            return created_at + ttl < time.time()
+
+        def _batch_cleanup(
+            self,
+        ) -> None:
+            """
+            Cleanup expired pairs in the database.
+            """
+            should_mark_as_deleted: List[Union[CompletePair, IncompletePair]] = []
+            should_hard_delete: List[Union[CompletePair, IncompletePair]] = []
+
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+
+            # Process entries in chunks to avoid loading the entire table into memory.
+            chunk_size = BATCH_CLEANUP_CHUNK_SIZE
+            offset = 0
+            while True:
+                cursor.execute("SELECT id, data FROM entries LIMIT ? OFFSET ?", (chunk_size, offset))
+                rows = cursor.fetchall()
+                if not rows:
+                    break
+
+                for row in rows:
+                    pair = unpack(row[1], kind="pair")
+                    if pair is None:
+                        continue
+
+                    # expired but not yet soft-deleted
+                    if self._is_pair_expired(pair, cursor) and not self.is_soft_deleted(pair):
+                        should_mark_as_deleted.append(pair)
+
+                    # soft-deleted and safe to hard delete, or corrupted pair
+                    if (self.is_soft_deleted(pair) and self.is_safe_to_hard_delete(pair)) or self._is_corrupted(
+                        pair, cursor
+                    ):
+                        should_hard_delete.append(pair)
+
+                # advance pagination
+                offset += len(rows)
+
+            for pair in should_mark_as_deleted:
+                self._soft_delete_pair(pair, cursor)
+
+            for pair in should_hard_delete:
+                self._hard_delete_pair(pair, cursor)
+
+            connection.commit()
+
+        def _is_corrupted(self, pair: IncompletePair | CompletePair, cursor: sqlite3.Cursor) -> bool:
+            # if pair was created more than 1 hour ago and still not completed
+            if pair.meta.created_at + 3600 < time.time() and isinstance(pair, IncompletePair):
+                return True
+
+            if isinstance(pair, CompletePair) and not self._is_stream_complete("request", pair.id, cursor):
+                return True
+            return False
+
+        def _hard_delete_pair(self, pair: CompletePair | IncompletePair, cursor: sqlite3.Cursor) -> None:
+            """
+            Permanently delete the pair from the database.
+            """
+            cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))
+
+            # Delete all streams (both request and response) for this entry
+            self._delete_stream(pair.id.bytes, cursor)
+
+        def _delete_stream(
+            self,
+            entry_id: bytes,
+            cursor: sqlite3.Cursor,
+            type: Literal["request", "response", "all"] = "all",
+        ) -> None:
+            """
+            Delete the streams associated with the given entry ID, optionally limited to one kind.
+            """
+            if type == "request":
+                cursor.execute(
+                    "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["request"])
+                )
+            elif type == "response":
+                cursor.execute(
+                    "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["response"])
+                )
+            elif type == "all":
+                cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
+
+        def _save_stream(
+            self,
+            stream: Iterator[bytes],
+            entry_id: bytes,
+            kind: Literal["response", "request"],
+        ) -> Iterator[bytes]:
+            """
+            Wrapper around an iterator that also saves the data to the cache in chunks.
+            """
+            kind_id = self._STREAM_KIND[kind]
+            chunk_number = 0
+            for chunk in stream:
+                connection = self._ensure_connection()
+                cursor = connection.cursor()
+                cursor.execute(
+                    "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
+                    (entry_id, kind_id, chunk_number, chunk),
+                )
+                connection.commit()
+                chunk_number += 1
+                yield chunk
+
+            # Mark end of stream with chunk_number = -1
+            connection = self._ensure_connection()
+            cursor = connection.cursor()
+            cursor.execute(
+                "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
+                (entry_id, kind_id, self._COMPLETE_CHUNK_NUMBER, b""),
+            )
+            connection.commit()
+
+        def _stream_data_from_cache(
+            self,
+            entry_id: bytes,
+            kind: Literal["response", "request"],
+        ) -> Iterator[bytes]:
+            """
+            Get an iterator that yields the stream data from the cache.
+            """
+            kind_id = self._STREAM_KIND[kind]
+            chunk_number = 0
+
+            connection = self._ensure_connection()
+            while True:
+                cursor = connection.cursor()
+                cursor.execute(
+                    "SELECT chunk_data FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ?",
+                    (entry_id, kind_id, chunk_number),
+                )
+                result = cursor.fetchone()
+
+                if result is None:
+                    break
+                chunk = result[0]
+                # chunk_number = -1 is the completion marker with empty data
+                if chunk == b"":
+                    break
+                yield chunk
+                chunk_number += 1
+except ImportError:
+
+    class SyncSqliteStorage(SyncBaseStorage):  # type: ignore[no-redef]
+        def __init__(self, *args: Any, **kwargs: Any) -> None:
+            raise ImportError(
+                "The 'sqlite3' library is required to use the `SyncSqliteStorage` integration. "
+                "It is part of the Python standard library but is missing from this Python build."
+            )
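
A rough usage sketch for the new storage (constructor arguments and method names are taken from the hunk above; the `Request`/`Response` model constructors live in `hishel/_core/models.py`, which this diff shows only in part, so the commented calls are illustrative):

from hishel import SyncSqliteStorage

# The SQLite file is created lazily under the directory returned by
# ensure_cache_dict(); entries older than default_ttl seconds are
# soft-deleted by the periodic batch cleanup.
storage = SyncSqliteStorage(database_path="my_cache.db", default_ttl=3600.0)

# pair = storage.create_pair(request)                  # persists the request, wraps its stream
# pair = storage.add_response(pair.id, response, key)  # completes the pair under a cache key
# hits = storage.get_pairs(key)                        # -> List[CompletePair] with lazy streams
# storage.remove(pair.id)                              # soft delete; hard delete happens in cleanup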
hishel/{beta/_core → _core}/models.py
@@ -12,7 +12,7 @@ from typing import (
     TypedDict,
 )
 
-from hishel.beta._core._headers import Headers
+from hishel._core._headers import Headers
 
 
 class AnyIterable:
hishel/{beta/_sync_cache.py → _sync_cache.py}
@@ -8,7 +8,7 @@ from typing import Iterator, Awaitable, Callable
 
 from typing_extensions import assert_never
 
-from hishel.beta import (
+from hishel import (
     AnyState,
     SyncBaseStorage,
     SyncSqliteStorage,
@@ -24,8 +24,8 @@ from hishel.beta import (
     StoreAndUse,
     create_idle_state,
 )
-from hishel.beta._core._spec import InvalidatePairs, vary_headers_match
-from hishel.beta._core.models import CompletePair
+from hishel._core._spec import InvalidatePairs, vary_headers_match
+from hishel._core.models import CompletePair
 
 logger = logging.getLogger("hishel.integrations.clients")
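
These integration hunks also confirm that the core names are re-exported from the package root in 1.0.0.dev1, so downstream code no longer needs the `hishel.beta` prefix:

# All of these imports are taken verbatim from the hunk above.
from hishel import (
    AnyState,
    SyncBaseStorage,
    SyncSqliteStorage,
    StoreAndUse,
    create_idle_state,
)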