hishel 0.1.5__py3-none-any.whl → 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. hishel/__init__.py +59 -52
  2. hishel/_async_cache.py +213 -0
  3. hishel/_async_httpx.py +236 -0
  4. hishel/{beta/_core → _core}/_headers.py +11 -1
  5. hishel/{beta/_core → _core}/_spec.py +270 -136
  6. hishel/_core/_storages/_async_base.py +71 -0
  7. hishel/_core/_storages/_async_sqlite.py +420 -0
  8. hishel/_core/_storages/_packing.py +144 -0
  9. hishel/_core/_storages/_sync_base.py +71 -0
  10. hishel/_core/_storages/_sync_sqlite.py +420 -0
  11. hishel/{beta/_core → _core}/models.py +100 -37
  12. hishel/_policies.py +49 -0
  13. hishel/_sync_cache.py +213 -0
  14. hishel/_sync_httpx.py +236 -0
  15. hishel/_utils.py +37 -366
  16. hishel/asgi.py +400 -0
  17. hishel/fastapi.py +263 -0
  18. hishel/httpx.py +12 -0
  19. hishel/{beta/requests.py → requests.py} +31 -25
  20. hishel-1.0.0b1.dist-info/METADATA +509 -0
  21. hishel-1.0.0b1.dist-info/RECORD +24 -0
  22. hishel/_async/__init__.py +0 -5
  23. hishel/_async/_client.py +0 -30
  24. hishel/_async/_mock.py +0 -43
  25. hishel/_async/_pool.py +0 -201
  26. hishel/_async/_storages.py +0 -768
  27. hishel/_async/_transports.py +0 -282
  28. hishel/_controller.py +0 -581
  29. hishel/_exceptions.py +0 -10
  30. hishel/_files.py +0 -54
  31. hishel/_headers.py +0 -215
  32. hishel/_lfu_cache.py +0 -71
  33. hishel/_lmdb_types_.pyi +0 -53
  34. hishel/_s3.py +0 -122
  35. hishel/_serializers.py +0 -329
  36. hishel/_sync/__init__.py +0 -5
  37. hishel/_sync/_client.py +0 -30
  38. hishel/_sync/_mock.py +0 -43
  39. hishel/_sync/_pool.py +0 -201
  40. hishel/_sync/_storages.py +0 -768
  41. hishel/_sync/_transports.py +0 -282
  42. hishel/_synchronization.py +0 -37
  43. hishel/beta/__init__.py +0 -59
  44. hishel/beta/_async_cache.py +0 -167
  45. hishel/beta/_core/__init__.py +0 -0
  46. hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
  47. hishel/beta/_core/_base/_storages/_base.py +0 -272
  48. hishel/beta/_core/_base/_storages/_packing.py +0 -165
  49. hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
  50. hishel/beta/_sync_cache.py +0 -167
  51. hishel/beta/httpx.py +0 -328
  52. hishel-0.1.5.dist-info/METADATA +0 -258
  53. hishel-0.1.5.dist-info/RECORD +0 -41
  54. {hishel-0.1.5.dist-info → hishel-1.0.0b1.dist-info}/WHEEL +0 -0
  55. {hishel-0.1.5.dist-info → hishel-1.0.0b1.dist-info}/licenses/LICENSE +0 -0
hishel/_core/_storages/_async_base.py
@@ -0,0 +1,71 @@
+ from __future__ import annotations
+
+ import abc
+ import time
+ import typing as tp
+ import uuid
+
+ from ..models import Entry, Request, Response
+
+
+ class AsyncBaseStorage(abc.ABC):
+     @abc.abstractmethod
+     async def create_entry(self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None) -> Entry:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     async def get_entries(self, key: str) -> tp.List[Entry]:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     async def update_entry(
+         self,
+         id: uuid.UUID,
+         new_entry: tp.Union[Entry, tp.Callable[[Entry], Entry]],
+     ) -> tp.Optional[Entry]:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     async def remove_entry(self, id: uuid.UUID) -> None:
+         raise NotImplementedError()
+
+     async def close(self) -> None:
+         pass
+
+     def is_soft_deleted(self, pair: Entry) -> bool:
+         """
+         Check if a pair is soft deleted based on its metadata.
+
+         Args:
+             pair: The request pair to check.
+
+         Returns:
+             True if the pair is soft deleted, False otherwise.
+         """
+         return pair.meta.deleted_at is not None and pair.meta.deleted_at > 0
+
+     def is_safe_to_hard_delete(self, pair: Entry) -> bool:
+         """
+         Check if a pair is safe to hard delete based on its metadata.
+
+         If the pair has been soft deleted for more than 1 hour, it is considered safe to hard delete.
+
+         Args:
+             pair: The request pair to check.
+
+         Returns:
+             True if the pair is safe to hard delete, False otherwise.
+         """
+         return bool(pair.meta.deleted_at is not None and (pair.meta.deleted_at + 3600 < time.time()))
+
+     def mark_pair_as_deleted(self, pair: Entry) -> Entry:
+         """
+         Mark a pair as soft deleted by setting its deleted_at timestamp.
+
+         Args:
+             pair: The request pair to mark as deleted.
+         Returns:
+             The updated request pair with the deleted_at timestamp set.
+         """
+         pair.meta.deleted_at = time.time()
+         return pair
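
The `AsyncBaseStorage` ABC above defines the full backend contract: four abstract entry operations plus concrete soft-delete helpers shared by every backend. Below is a minimal in-memory sketch of a conforming backend, assuming the internal import paths shown in this diff and keyword construction of `Entry`/`EntryMeta` (both inferred from the code above, not from a documented public API):

```python
from __future__ import annotations

import time
import uuid
from typing import Callable, Dict, List, Optional, Union

from hishel._core._storages._async_base import AsyncBaseStorage  # internal path, as in this diff
from hishel._core.models import Entry, EntryMeta, Request, Response


class InMemoryStorage(AsyncBaseStorage):
    """A toy backend: no persistence, no stream handling, no TTL cleanup."""

    def __init__(self) -> None:
        self._entries: Dict[uuid.UUID, Entry] = {}

    async def create_entry(
        self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None
    ) -> Entry:
        entry = Entry(
            id=id_ or uuid.uuid4(),
            request=request,
            response=response,
            meta=EntryMeta(created_at=time.time()),
            cache_key=key.encode("utf-8"),
        )
        self._entries[entry.id] = entry
        return entry

    async def get_entries(self, key: str) -> List[Entry]:
        key_bytes = key.encode("utf-8")
        # Hide soft-deleted entries, reusing the base-class helper.
        return [
            e
            for e in self._entries.values()
            if e.cache_key == key_bytes and not self.is_soft_deleted(e)
        ]

    async def update_entry(
        self, id: uuid.UUID, new_entry: Union[Entry, Callable[[Entry], Entry]]
    ) -> Optional[Entry]:
        current = self._entries.get(id)
        if current is None:
            return None
        updated = new_entry if isinstance(new_entry, Entry) else new_entry(current)
        self._entries[id] = updated
        return updated

    async def remove_entry(self, id: uuid.UUID) -> None:
        entry = self._entries.get(id)
        if entry is not None:
            # Soft delete, matching the SQLite backends; a real backend would
            # hard-delete later, once is_safe_to_hard_delete() returns True.
            self._entries[id] = self.mark_pair_as_deleted(entry)
```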
hishel/_core/_storages/_async_sqlite.py
@@ -0,0 +1,420 @@
+ from __future__ import annotations
+
+ import time
+ import uuid
+ from dataclasses import replace
+ from typing import (
+     Any,
+     AsyncIterable,
+     AsyncIterator,
+     Callable,
+     List,
+     Optional,
+     Union,
+ )
+
+ from hishel._core._storages._async_base import AsyncBaseStorage
+ from hishel._core._storages._packing import pack, unpack
+ from hishel._core.models import (
+     Entry,
+     EntryMeta,
+     Request,
+     Response,
+ )
+ from hishel._utils import ensure_cache_dict
+
+ # Batch cleanup configuration
+ # How often to run cleanup (seconds). Default: 1 hour.
+ BATCH_CLEANUP_INTERVAL = 3600
+ # How long to wait after storage creation before allowing the first cleanup (seconds)
+ BATCH_CLEANUP_START_DELAY = 5 * 60
+ # Number of rows to process per chunk when cleaning
+ BATCH_CLEANUP_CHUNK_SIZE = 200
+
+
+ try:
+     import anysqlite
+
+     class AsyncSqliteStorage(AsyncBaseStorage):
+         _COMPLETE_CHUNK_NUMBER = -1
+
+         def __init__(
+             self,
+             *,
+             connection: Optional[anysqlite.Connection] = None,
+             database_path: str = "hishel_cache.db",
+             default_ttl: Optional[float] = None,
+             refresh_ttl_on_access: bool = True,
+         ) -> None:
+             base_path = ensure_cache_dict()
+
+             self.connection = connection
+             self.database_path = base_path / database_path
+             self.default_ttl = default_ttl
+             self.refresh_ttl_on_access = refresh_ttl_on_access
+             self.last_cleanup = time.time() - BATCH_CLEANUP_INTERVAL + BATCH_CLEANUP_START_DELAY
+             # When this storage instance was created. Used to delay the first cleanup.
+             self._start_time = time.time()
+             self._initialized = False
+
+         async def _ensure_connection(self) -> anysqlite.Connection:
+             """Ensure connection is established and database is initialized."""
+             if self.connection is None:
+                 self.connection = await anysqlite.connect(str(self.database_path))
+             if not self._initialized:
+                 await self._initialize_database()
+                 self._initialized = True
+             return self.connection
+
+         async def _initialize_database(self) -> None:
+             """Initialize the database schema."""
+             assert self.connection is not None
+             cursor = await self.connection.cursor()
+
+             # Table for storing request/response pairs
+             await cursor.execute("""
+                 CREATE TABLE IF NOT EXISTS entries (
+                     id BLOB PRIMARY KEY,
+                     cache_key BLOB,
+                     data BLOB NOT NULL,
+                     created_at REAL NOT NULL,
+                     deleted_at REAL
+                 )
+             """)
+
+             # Table for storing response stream chunks only
+             await cursor.execute("""
+                 CREATE TABLE IF NOT EXISTS streams (
+                     entry_id BLOB NOT NULL,
+                     chunk_number INTEGER NOT NULL,
+                     chunk_data BLOB NOT NULL,
+                     PRIMARY KEY (entry_id, chunk_number),
+                     FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
+                 )
+             """)
+
+             # Indexes for performance
+             await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
+             await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
+
+             await self.connection.commit()
+
+         async def create_entry(
+             self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None
+         ) -> Entry:
+             key_bytes = key.encode("utf-8")
+
+             connection = await self._ensure_connection()
+             cursor = await connection.cursor()
+
+             # Create a new entry directly with both request and response
+             pair_id = id_ if id_ is not None else uuid.uuid4()
+             pair_meta = EntryMeta(
+                 created_at=time.time(),
+             )
+
+             assert isinstance(response.stream, (AsyncIterator, AsyncIterable))
+             response_with_stream = replace(
+                 response,
+                 stream=self._save_stream(response.stream, pair_id.bytes),
+             )
+
+             complete_entry = Entry(
+                 id=pair_id,
+                 request=request,
+                 response=response_with_stream,
+                 meta=pair_meta,
+                 cache_key=key_bytes,
+             )
+
+             # Insert the complete entry into the database
+             await cursor.execute(
+                 "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
+                 (pair_id.bytes, key_bytes, pack(complete_entry, kind="pair"), pair_meta.created_at, None),
+             )
+             await connection.commit()
+
+             return complete_entry
+
+         async def get_entries(self, key: str) -> List[Entry]:
+             final_pairs: List[Entry] = []
+
+             now = time.time()
+             if now - self.last_cleanup >= BATCH_CLEANUP_INTERVAL:
+                 try:
+                     await self._batch_cleanup()
+                 except Exception:
+                     # don't let cleanup prevent reads; failures are non-fatal
+                     pass
+
+             connection = await self._ensure_connection()
+             cursor = await connection.cursor()
+             # Query entries directly by cache_key
+             await cursor.execute(
+                 "SELECT id, data FROM entries WHERE cache_key = ?",
+                 (key.encode("utf-8"),),
+             )
+
+             for row in await cursor.fetchall():
+                 pair_data = unpack(row[1], kind="pair")
+
+                 # Skip entries without a response (incomplete)
+                 if not isinstance(pair_data, Entry) or pair_data.response is None:
+                     continue
+
+                 final_pairs.append(pair_data)
+
+             pairs_with_streams: List[Entry] = []
+
+             # Only restore response streams from cache
+             for pair in final_pairs:
+                 pairs_with_streams.append(
+                     replace(
+                         pair,
+                         response=replace(
+                             pair.response,
+                             stream=self._stream_data_from_cache(pair.id.bytes),
+                         ),
+                     )
+                 )
+             return pairs_with_streams
+
+         async def update_entry(
+             self,
+             id: uuid.UUID,
+             new_pair: Union[Entry, Callable[[Entry], Entry]],
+         ) -> Optional[Entry]:
+             connection = await self._ensure_connection()
+             cursor = await connection.cursor()
+             await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+             result = await cursor.fetchone()
+
+             if result is None:
+                 return None
+
+             pair = unpack(result[0], kind="pair")
+
+             # Skip entries without a response (incomplete)
+             if not isinstance(pair, Entry) or pair.response is None:
+                 return None
+
+             if isinstance(new_pair, Entry):
+                 complete_pair = new_pair
+             else:
+                 complete_pair = new_pair(pair)
+
+             if pair.id != complete_pair.id:
+                 raise ValueError("Pair ID mismatch")
+
+             await cursor.execute(
+                 "UPDATE entries SET data = ? WHERE id = ?",
+                 (pack(complete_pair, kind="pair"), id.bytes),
+             )
+
+             if pair.cache_key != complete_pair.cache_key:
+                 await cursor.execute(
+                     "UPDATE entries SET cache_key = ? WHERE id = ?",
+                     (complete_pair.cache_key, complete_pair.id.bytes),
+                 )
+
+             await connection.commit()
+
+             return complete_pair
+
+         async def remove_entry(self, id: uuid.UUID) -> None:
+             connection = await self._ensure_connection()
+             cursor = await connection.cursor()
+             await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
+             result = await cursor.fetchone()
+
+             if result is None:
+                 return None
+
+             pair = unpack(result[0], kind="pair")
+             await self._soft_delete_pair(pair, cursor)
+             await connection.commit()
+
+         async def _is_stream_complete(self, pair_id: uuid.UUID, cursor: anysqlite.Cursor) -> bool:
+             # Check if there's a completion marker (chunk_number = -1) for response stream
+             await cursor.execute(
+                 "SELECT 1 FROM streams WHERE entry_id = ? AND chunk_number = ? LIMIT 1",
+                 (pair_id.bytes, self._COMPLETE_CHUNK_NUMBER),
+             )
+             return await cursor.fetchone() is not None
+
+         async def _soft_delete_pair(
+             self,
+             pair: Entry,
+             cursor: anysqlite.Cursor,
+         ) -> None:
+             """
+             Mark the pair as deleted by setting the deleted_at timestamp.
+             """
+             marked_pair = self.mark_pair_as_deleted(pair)
+             await cursor.execute(
+                 "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
+                 (
+                     pack(marked_pair, kind="pair"),
+                     marked_pair.meta.deleted_at,
+                     pair.id.bytes,
+                 ),
+             )
+
+         async def _is_pair_expired(self, pair: Entry, cursor: anysqlite.Cursor) -> bool:
+             """
+             Check if the pair is expired.
+             """
+             ttl = pair.request.metadata["hishel_ttl"] if "hishel_ttl" in pair.request.metadata else self.default_ttl
+             created_at = pair.meta.created_at
+             if ttl is None:
+                 return False
+             return created_at + ttl < time.time()
+
+         async def _batch_cleanup(
+             self,
+         ) -> None:
+             """
+             Cleanup expired entries in the database.
+             """
+             should_mark_as_deleted: List[Entry] = []
+             should_hard_delete: List[Entry] = []
+
+             connection = await self._ensure_connection()
+             cursor = await connection.cursor()
+
+             # Process entries in chunks to avoid loading the entire table into memory.
+             chunk_size = BATCH_CLEANUP_CHUNK_SIZE
+             offset = 0
+             while True:
+                 await cursor.execute(
+                     "SELECT id, data FROM entries LIMIT ? OFFSET ?",
+                     (chunk_size, offset),
+                 )
+                 rows = await cursor.fetchall()
+                 if not rows:
+                     break
+
+                 for row in rows:
+                     pair = unpack(row[1], kind="pair")
+                     if pair is None:
+                         continue
+
+                     # expired but not yet soft-deleted
+                     if await self._is_pair_expired(pair, cursor) and not self.is_soft_deleted(pair):
+                         should_mark_as_deleted.append(pair)
+
+                     # soft-deleted and safe to hard delete, or corrupted pair
+                     if (self.is_soft_deleted(pair) and self.is_safe_to_hard_delete(pair)) or await self._is_corrupted(
+                         pair, cursor
+                     ):
+                         should_hard_delete.append(pair)
+
+                 # advance pagination
+                 offset += len(rows)
+
+             for pair in should_mark_as_deleted:
+                 await self._soft_delete_pair(pair, cursor)
+
+             for pair in should_hard_delete:
+                 await self._hard_delete_pair(pair, cursor)
+
+             await connection.commit()
+
+         async def _is_corrupted(self, pair: Entry, cursor: anysqlite.Cursor) -> bool:
+             # if entry was created more than 1 hour ago and still has no response (incomplete)
+             if pair.meta.created_at + 3600 < time.time() and pair.response is None:
+                 return True
+
+             # Check if response stream is complete for Entry with response
+             if (
+                 isinstance(pair, Entry)
+                 and pair.response is not None
+                 and not await self._is_stream_complete(pair.id, cursor)
+             ):
+                 return True
+             return False
+
+         async def _hard_delete_pair(self, pair: Entry, cursor: anysqlite.Cursor) -> None:
+             """
+             Permanently delete the pair from the database.
+             """
+             await cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))
+
+             # Delete response stream for this entry
+             await self._delete_stream(pair.id.bytes, cursor)
+
+         async def _delete_stream(
+             self,
+             entry_id: bytes,
+             cursor: anysqlite.Cursor,
+         ) -> None:
+             """
+             Delete response stream associated with the given entry ID.
+             """
+             await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
+
+         async def _save_stream(
+             self,
+             stream: AsyncIterator[bytes],
+             entry_id: bytes,
+         ) -> AsyncIterator[bytes]:
+             """
+             Wrapper around an async iterator that also saves the response data to the cache in chunks.
+             """
+             chunk_number = 0
+             content_length = 0
+             async for chunk in stream:
+                 content_length += len(chunk)
+                 connection = await self._ensure_connection()
+                 cursor = await connection.cursor()
+                 await cursor.execute(
+                     "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                     (entry_id, chunk_number, chunk),
+                 )
+                 await connection.commit()
+                 chunk_number += 1
+                 yield chunk
+
+             # Mark end of stream with chunk_number = -1
+             connection = await self._ensure_connection()
+             cursor = await connection.cursor()
+             await cursor.execute(
+                 "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                 (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
+             )
+             await connection.commit()
+
+         async def _stream_data_from_cache(
+             self,
+             entry_id: bytes,
+         ) -> AsyncIterator[bytes]:
+             """
+             Get an async iterator that yields the response stream data from the cache.
+             """
+             chunk_number = 0
+
+             connection = await self._ensure_connection()
+             while True:
+                 cursor = await connection.cursor()
+                 await cursor.execute(
+                     "SELECT chunk_data FROM streams WHERE entry_id = ? AND chunk_number = ?",
+                     (entry_id, chunk_number),
+                 )
+                 result = await cursor.fetchone()
+
+                 if result is None:
+                     break
+                 chunk = result[0]
+                 # chunk_number = -1 is the completion marker with empty data
+                 if chunk == b"":
+                     break
+                 yield chunk
+                 chunk_number += 1
+ except ImportError:
+
+     class AsyncSqliteStorage:  # type: ignore[no-redef]
+         def __init__(self, *args: Any, **kwargs: Any) -> None:
+             raise ImportError(
+                 "The 'anysqlite' library is required to use the `AsyncSqliteStorage` integration. "
+                 "Install hishel with 'pip install hishel[async]'."
+             )
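
A hedged usage sketch for `AsyncSqliteStorage`: it assumes `anysqlite` is installed (`pip install hishel[async]`) and that `Request`/`Response`/`Headers` accept the keyword fields seen in `unpack()` further below; the empty-list `Headers([])` constructor argument is an assumption. Draining the stream returned by `create_entry` is what actually persists the body chunks and the completion marker:

```python
import asyncio

from hishel._core._headers import Headers  # internal paths, as imported in this diff
from hishel._core._storages._async_sqlite import AsyncSqliteStorage
from hishel._core.models import Request, Response


async def main() -> None:
    storage = AsyncSqliteStorage(database_path="demo_cache.db", default_ttl=300.0)

    async def body():
        # Response streams must be async iterables; create_entry asserts this.
        yield b"hello, "
        yield b"world"

    request = Request(
        method="GET", url="https://example.org/", headers=Headers([]), metadata={}, stream=iter([])
    )
    response = Response(status_code=200, headers=Headers([]), metadata={}, stream=body())

    entry = await storage.create_entry(request, response, key="example-key")

    # create_entry wraps the stream in _save_stream; consuming it writes the
    # chunks and the chunk_number = -1 completion marker to the streams table.
    async for _ in entry.response.stream:
        pass

    # Cached reads get their bodies replayed from the streams table.
    for cached in await storage.get_entries("example-key"):
        print(cached.response.status_code, b"".join([c async for c in cached.response.stream]))

    await storage.close()


asyncio.run(main())
```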
hishel/_core/_storages/_packing.py
@@ -0,0 +1,144 @@
+ from __future__ import annotations
+
+ import uuid
+ from typing import TYPE_CHECKING, Any, Mapping, Optional, Union, overload
+
+ import msgpack
+ from typing_extensions import Literal, cast
+
+ from hishel._core._headers import Headers
+ from hishel._core.models import EntryMeta, Request, Response
+
+
+ def filter_out_hishel_metadata(data: Mapping[str, Any]) -> dict[str, Any]:
+     return {k: v for k, v in data.items() if not k.startswith("hishel_")}
+
+
+ if TYPE_CHECKING:
+     from hishel import Entry
+
+
+ @overload
+ def pack(
+     value: "Entry",
+     /,
+     kind: Literal["pair"],
+ ) -> bytes: ...
+
+
+ @overload
+ def pack(
+     value: uuid.UUID,
+     /,
+     kind: Literal["entry_db_key_index"],
+ ) -> bytes: ...
+
+
+ def pack(
+     value: Union["Entry", uuid.UUID],
+     /,
+     kind: Literal["pair", "entry_db_key_index"],
+ ) -> bytes:
+     from hishel import Entry
+
+     if kind == "entry_db_key_index":
+         assert isinstance(value, uuid.UUID)
+         return value.bytes
+     elif kind == "pair":
+         assert isinstance(value, Entry)
+         return cast(
+             bytes,
+             msgpack.packb(
+                 {
+                     "id": value.id.bytes,
+                     "request": {
+                         "method": value.request.method,
+                         "url": value.request.url,
+                         "headers": value.request.headers._headers,
+                         "extra": filter_out_hishel_metadata(value.request.metadata),
+                     },
+                     "response": {
+                         "status_code": value.response.status_code,
+                         "headers": value.response.headers._headers,
+                         "extra": filter_out_hishel_metadata(value.response.metadata),
+                     },
+                     "meta": {
+                         "created_at": value.meta.created_at,
+                         "deleted_at": value.meta.deleted_at,
+                     },
+                     "cache_key": value.cache_key,
+                 }
+             ),
+         )
+     assert False, f"Unexpected kind: {kind}"
+
+
+ @overload
+ def unpack(
+     value: bytes,
+     /,
+     kind: Literal["pair"],
+ ) -> "Entry": ...
+
+
+ @overload
+ def unpack(
+     value: bytes,
+     /,
+     kind: Literal["entry_db_key_index"],
+ ) -> uuid.UUID: ...
+
+
+ @overload
+ def unpack(
+     value: Optional[bytes],
+     /,
+     kind: Literal["pair"],
+ ) -> Optional["Entry"]: ...
+
+
+ @overload
+ def unpack(
+     value: Optional[bytes],
+     /,
+     kind: Literal["entry_db_key_index"],
+ ) -> Optional[uuid.UUID]: ...
+
+
+ def unpack(
+     value: Optional[bytes],
+     /,
+     kind: Literal["pair", "entry_db_key_index"],
+ ) -> Union["Entry", uuid.UUID, None]:
+     from hishel import Entry
+
+     if value is None:
+         return None
+     if kind == "entry_db_key_index":
+         return uuid.UUID(bytes=value)
+     elif kind == "pair":
+         data = msgpack.unpackb(value)
+         id = uuid.UUID(bytes=data["id"])
+         return Entry(
+             id=id,
+             request=Request(
+                 method=data["request"]["method"],
+                 url=data["request"]["url"],
+                 headers=Headers(data["request"]["headers"]),
+                 metadata=data["request"]["extra"],
+                 stream=iter([]),
+             ),
+             response=(
+                 Response(
+                     status_code=data["response"]["status_code"],
+                     headers=Headers(data["response"]["headers"]),
+                     metadata=data["response"]["extra"],
+                     stream=iter([]),
+                 )
+             ),
+             meta=EntryMeta(
+                 created_at=data["meta"]["created_at"],
+                 deleted_at=data["meta"]["deleted_at"],
+             ),
+             cache_key=data["cache_key"],
+         )
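
A round-trip sketch for `pack()`/`unpack()`, under the same assumptions about the model constructors. Two properties of the format are visible in the code above and worth noting: streams are never serialized (`unpack()` always restores `stream=iter([])`), and any metadata key prefixed `hishel_` is stripped by `filter_out_hishel_metadata` on pack:

```python
import time
import uuid

from hishel import Entry  # the packing helpers themselves import Entry from hishel
from hishel._core._headers import Headers
from hishel._core._storages._packing import pack, unpack
from hishel._core.models import EntryMeta, Request, Response

entry = Entry(
    id=uuid.uuid4(),
    request=Request(
        method="GET",
        url="https://example.org/",
        headers=Headers([]),
        metadata={"hishel_ttl": 60.0, "trace_id": "abc"},
        stream=iter([]),
    ),
    response=Response(status_code=200, headers=Headers([]), metadata={}, stream=iter([])),
    meta=EntryMeta(created_at=time.time()),
    cache_key=b"example-key",
)

blob = pack(entry, kind="pair")  # msgpack bytes; stream bodies are not included
restored = unpack(blob, kind="pair")

assert restored is not None and restored.id == entry.id
assert "hishel_ttl" not in restored.request.metadata  # hishel_* keys are stripped
assert restored.request.metadata["trace_id"] == "abc"
```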
hishel/_core/_storages/_sync_base.py
@@ -0,0 +1,71 @@
+ from __future__ import annotations
+
+ import abc
+ import time
+ import typing as tp
+ import uuid
+
+ from ..models import Entry, Request, Response
+
+
+ class SyncBaseStorage(abc.ABC):
+     @abc.abstractmethod
+     def create_entry(self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None) -> Entry:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     def get_entries(self, key: str) -> tp.List[Entry]:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     def update_entry(
+         self,
+         id: uuid.UUID,
+         new_entry: tp.Union[Entry, tp.Callable[[Entry], Entry]],
+     ) -> tp.Optional[Entry]:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     def remove_entry(self, id: uuid.UUID) -> None:
+         raise NotImplementedError()
+
+     def close(self) -> None:
+         pass
+
+     def is_soft_deleted(self, pair: Entry) -> bool:
+         """
+         Check if a pair is soft deleted based on its metadata.
+
+         Args:
+             pair: The request pair to check.
+
+         Returns:
+             True if the pair is soft deleted, False otherwise.
+         """
+         return pair.meta.deleted_at is not None and pair.meta.deleted_at > 0
+
+     def is_safe_to_hard_delete(self, pair: Entry) -> bool:
+         """
+         Check if a pair is safe to hard delete based on its metadata.
+
+         If the pair has been soft deleted for more than 1 hour, it is considered safe to hard delete.
+
+         Args:
+             pair: The request pair to check.
+
+         Returns:
+             True if the pair is safe to hard delete, False otherwise.
+         """
+         return bool(pair.meta.deleted_at is not None and (pair.meta.deleted_at + 3600 < time.time()))
+
+     def mark_pair_as_deleted(self, pair: Entry) -> Entry:
+         """
+         Mark a pair as soft deleted by setting its deleted_at timestamp.
+
+         Args:
+             pair: The request pair to mark as deleted.
+         Returns:
+             The updated request pair with the deleted_at timestamp set.
+         """
+         pair.meta.deleted_at = time.time()
+         return pair
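
`SyncBaseStorage` mirrors `AsyncBaseStorage` method for method, minus `async`. The shared soft-delete lifecycle can be exercised in isolation with a stub subclass; a sketch, again assuming the internal paths and keyword model constructors seen in this diff:

```python
from __future__ import annotations

import time
import uuid

from hishel._core._headers import Headers  # internal path, as in this diff
from hishel._core._storages._sync_base import SyncBaseStorage
from hishel._core.models import Entry, EntryMeta, Request, Response


class _Probe(SyncBaseStorage):
    # Stubs: only the concrete soft-delete helpers are exercised here.
    def create_entry(self, request, response, key, id_=None):
        raise NotImplementedError

    def get_entries(self, key):
        raise NotImplementedError

    def update_entry(self, id, new_entry):
        raise NotImplementedError

    def remove_entry(self, id):
        raise NotImplementedError


storage = _Probe()
entry = Entry(
    id=uuid.uuid4(),
    request=Request(method="GET", url="https://example.org/", headers=Headers([]), metadata={}, stream=iter([])),
    response=Response(status_code=200, headers=Headers([]), metadata={}, stream=iter([])),
    meta=EntryMeta(created_at=time.time()),
    cache_key=b"k",
)

assert not storage.is_soft_deleted(entry)
entry = storage.mark_pair_as_deleted(entry)  # sets meta.deleted_at to now
assert storage.is_soft_deleted(entry)
assert not storage.is_safe_to_hard_delete(entry)  # only safe one hour after the soft delete
```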