hishel 1.0.0.dev2__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
@@ -3,27 +3,26 @@ from __future__ import annotations
 import time
 import uuid
 from dataclasses import replace
+from pathlib import Path
 from typing import (
     Any,
     AsyncIterable,
     AsyncIterator,
     Callable,
     List,
-    Literal,
     Optional,
     Union,
 )
 
-from hishel._core._base._storages._base import AsyncBaseStorage, ensure_cache_dict
-from hishel._core._base._storages._packing import pack, unpack
+from hishel._core._storages._async_base import AsyncBaseStorage
+from hishel._core._storages._packing import pack, unpack
 from hishel._core.models import (
-    CompletePair,
-    IncompletePair,
-    Pair,
-    PairMeta,
+    Entry,
+    EntryMeta,
     Request,
     Response,
 )
+from hishel._utils import ensure_cache_dict
 
 # Batch cleanup configuration
 # How often to run cleanup (seconds). Default: 1 hour.
@@ -38,7 +37,6 @@ try:
     import anysqlite
 
     class AsyncSqliteStorage(AsyncBaseStorage):
-        _STREAM_KIND = {"request": 0, "response": 1}
         _COMPLETE_CHUNK_NUMBER = -1
 
         def __init__(
@@ -49,10 +47,12 @@ try:
             default_ttl: Optional[float] = None,
             refresh_ttl_on_access: bool = True,
         ) -> None:
-            base_path = ensure_cache_dict()
+            db_path = Path(database_path)
 
             self.connection = connection
-            self.database_path = base_path / database_path
+            self.database_path = (
+                ensure_cache_dict(db_path.parent if db_path.parent != Path(".") else None) / db_path.name
+            )
             self.default_ttl = default_ttl
             self.refresh_ttl_on_access = refresh_ttl_on_access
             self.last_cleanup = time.time() - BATCH_CLEANUP_INTERVAL + BATCH_CLEANUP_START_DELAY
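
The new path handling above means a bare file name is placed inside hishel's cache directory, while a path with an explicit parent keeps that parent. A minimal sketch of the rule, with `resolve_db_path` and `default_cache_dir` as illustrative stand-ins for `ensure_cache_dict` (not hishel API):

    from pathlib import Path

    def resolve_db_path(database_path: str, default_cache_dir: Path) -> Path:
        # A bare filename lands in the default cache directory; an explicit
        # parent directory is kept and reused as-is.
        db_path = Path(database_path)
        base = default_cache_dir if db_path.parent == Path(".") else db_path.parent
        return base / db_path.name

    # resolve_db_path("hishel.db", Path("/home/u/.cache/hishel"))     -> /home/u/.cache/hishel/hishel.db
    # resolve_db_path("/var/cache/hishel.db", Path("/home/u/.cache")) -> /var/cache/hishel.db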
@@ -85,14 +85,13 @@ try:
                 )
             """)
 
-            # Table for storing stream chunks
+            # Table for storing response stream chunks only
             await cursor.execute("""
                 CREATE TABLE IF NOT EXISTS streams (
                     entry_id BLOB NOT NULL,
-                    kind INTEGER NOT NULL,
                     chunk_number INTEGER NOT NULL,
                     chunk_data BLOB NOT NULL,
-                    PRIMARY KEY (entry_id, kind, chunk_number),
+                    PRIMARY KEY (entry_id, chunk_number),
                     FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
                 )
             """)
@@ -100,85 +99,48 @@ try:
             # Indexes for performance
             await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
             await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
-            # Note: PRIMARY KEY (entry_id, kind, chunk_number) already provides an index
-            # for queries like: entry_id = ? AND kind = ? AND chunk_number = ?
 
             await self.connection.commit()
 
-        async def create_pair(
-            self,
-            request: Request,
-            id: uuid.UUID | None = None,
-        ) -> IncompletePair:
-            pair_id = id if id is not None else uuid.uuid4()
-            pair_meta = PairMeta(
-                created_at=time.time(),
-            )
-
-            pair = IncompletePair(id=pair_id, request=request, meta=pair_meta)
-
-            packed_pair = pack(pair, kind="pair")
+        async def create_entry(
+            self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None
+        ) -> Entry:
+            key_bytes = key.encode("utf-8")
 
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
-            await cursor.execute(
-                "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
-                (pair_id.bytes, None, packed_pair, pair_meta.created_at, None),
-            )
-            await connection.commit()
 
-            assert isinstance(request.stream, AsyncIterable), "Request stream must be an AsyncIterable, not Iterable"
-
-            request = Request(
-                method=request.method,
-                url=request.url,
-                headers=request.headers,
-                metadata=request.metadata,
-                stream=self._save_stream(request.stream, pair_id.bytes, "request"),
+            # Create a new entry directly with both request and response
+            pair_id = id_ if id_ is not None else uuid.uuid4()
+            pair_meta = EntryMeta(
+                created_at=time.time(),
             )
 
-            return replace(pair, request=request)
-
-        async def add_response(
-            self,
-            pair_id: uuid.UUID,
-            response: Response,
-            key: str | bytes,
-        ) -> CompletePair:
-            if isinstance(key, str):
-                key = key.encode("utf-8")
-
-            connection = await self._ensure_connection()
-            cursor = await connection.cursor()
-
-            # Get the existing pair
-            await cursor.execute("SELECT data FROM entries WHERE id = ?", (pair_id.bytes,))
-            result = await cursor.fetchone()
-
-            if result is None:
-                raise ValueError(f"Entry with ID {pair_id} not found.")
-
-            pair = unpack(result[0], kind="pair")
-
             assert isinstance(response.stream, (AsyncIterator, AsyncIterable))
-            response = replace(response, stream=self._save_stream(response.stream, pair_id.bytes, "response"))
+            response_with_stream = replace(
+                response,
+                stream=self._save_stream(response.stream, pair_id.bytes),
+            )
 
-            await self._delete_stream(pair.id.bytes, cursor, type="response")
-            complete_pair = CompletePair(
-                id=pair.id, request=pair.request, response=response, meta=pair.meta, cache_key=key
+            complete_entry = Entry(
+                id=pair_id,
+                request=request,
+                response=response_with_stream,
+                meta=pair_meta,
+                cache_key=key_bytes,
             )
 
-            # Update the entry with the complete pair and set cache_key
+            # Insert the complete entry into the database
             await cursor.execute(
-                "UPDATE entries SET data = ?, cache_key = ? WHERE id = ?",
-                (pack(complete_pair, kind="pair"), key, pair_id.bytes),
+                "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
+                (pair_id.bytes, key_bytes, pack(complete_entry, kind="pair"), pair_meta.created_at, None),
             )
             await connection.commit()
 
-            return complete_pair
+            return complete_entry
 
-        async def get_pairs(self, key: str) -> List[CompletePair]:
-            final_pairs: List[CompletePair] = []
+        async def get_entries(self, key: str) -> List[Entry]:
+            final_pairs: List[Entry] = []
 
             now = time.time()
             if now - self.last_cleanup >= BATCH_CLEANUP_INTERVAL:
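
The hunk above collapses the old two-step write path into a single call. A migration sketch in comment form (method names are from this diff; variable names are illustrative):

    # 1.0.0.dev2 - two round-trips, request stream persisted as well:
    #     pair = await storage.create_pair(request)
    #     entry = await storage.add_response(pair.id, response, key)
    #
    # 1.1.0 - one INSERT with both halves; only the response stream is stored:
    #     entry = await storage.create_entry(request, response, key)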
@@ -191,39 +153,40 @@ try:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             # Query entries directly by cache_key
-            await cursor.execute("SELECT id, data FROM entries WHERE cache_key = ?", (key.encode("utf-8"),))
+            await cursor.execute(
+                "SELECT id, data FROM entries WHERE cache_key = ?",
+                (key.encode("utf-8"),),
+            )
 
             for row in await cursor.fetchall():
                 pair_data = unpack(row[1], kind="pair")
 
-                if isinstance(pair_data, IncompletePair):
+                # Skip entries without a response (incomplete)
+                if not isinstance(pair_data, Entry) or pair_data.response is None:
                     continue
 
                 final_pairs.append(pair_data)
 
-            pairs_with_streams: List[CompletePair] = []
+            pairs_with_streams: List[Entry] = []
 
+            # Only restore response streams from cache
             for pair in final_pairs:
                 pairs_with_streams.append(
                     replace(
                         pair,
                         response=replace(
                             pair.response,
-                            stream=self._stream_data_from_cache(pair.id.bytes, "response"),
-                        ),
-                        request=replace(
-                            pair.request,
-                            stream=self._stream_data_from_cache(pair.id.bytes, "request"),
+                            stream=self._stream_data_from_cache(pair.id.bytes),
                         ),
                     )
                 )
             return pairs_with_streams
 
-        async def update_pair(
+        async def update_entry(
             self,
             id: uuid.UUID,
-            new_pair: Union[CompletePair, Callable[[CompletePair], CompletePair]],
-        ) -> Optional[CompletePair]:
+            new_pair: Union[Entry, Callable[[Entry], Entry]],
+        ) -> Optional[Entry]:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
@@ -234,10 +197,11 @@ try:
 
             pair = unpack(result[0], kind="pair")
 
-            if isinstance(pair, IncompletePair):
+            # Skip entries without a response (incomplete)
+            if not isinstance(pair, Entry) or pair.response is None:
                 return None
 
-            if isinstance(new_pair, CompletePair):
+            if isinstance(new_pair, Entry):
                 complete_pair = new_pair
             else:
                 complete_pair = new_pair(pair)
@@ -246,7 +210,8 @@ try:
                 raise ValueError("Pair ID mismatch")
 
             await cursor.execute(
-                "UPDATE entries SET data = ? WHERE id = ?", (pack(complete_pair, kind="pair"), id.bytes)
+                "UPDATE entries SET data = ? WHERE id = ?",
+                (pack(complete_pair, kind="pair"), id.bytes),
             )
 
             if pair.cache_key != complete_pair.cache_key:
@@ -259,7 +224,7 @@ try:
 
             return complete_pair
 
-        async def remove(self, id: uuid.UUID) -> None:
+        async def remove_entry(self, id: uuid.UUID) -> None:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
@@ -272,28 +237,33 @@ try:
             await self._soft_delete_pair(pair, cursor)
             await connection.commit()
 
-        async def _is_stream_complete(
-            self, kind: Literal["request", "response"], pair_id: uuid.UUID, cursor: anysqlite.Cursor
-        ) -> bool:
-            kind_id = self._STREAM_KIND[kind]
-            # Check if there's a completion marker (chunk_number = -1)
+        async def _is_stream_complete(self, pair_id: uuid.UUID, cursor: anysqlite.Cursor) -> bool:
+            # Check if there's a completion marker (chunk_number = -1) for response stream
             await cursor.execute(
-                "SELECT 1 FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ? LIMIT 1",
-                (pair_id.bytes, kind_id, self._COMPLETE_CHUNK_NUMBER),
+                "SELECT 1 FROM streams WHERE entry_id = ? AND chunk_number = ? LIMIT 1",
+                (pair_id.bytes, self._COMPLETE_CHUNK_NUMBER),
             )
             return await cursor.fetchone() is not None
 
-        async def _soft_delete_pair(self, pair: Union[CompletePair, IncompletePair], cursor: anysqlite.Cursor) -> None:
+        async def _soft_delete_pair(
+            self,
+            pair: Entry,
+            cursor: anysqlite.Cursor,
+        ) -> None:
             """
             Mark the pair as deleted by setting the deleted_at timestamp.
             """
             marked_pair = self.mark_pair_as_deleted(pair)
             await cursor.execute(
                 "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
-                (pack(marked_pair, kind="pair"), marked_pair.meta.deleted_at, pair.id.bytes),
+                (
+                    pack(marked_pair, kind="pair"),
+                    marked_pair.meta.deleted_at,
+                    pair.id.bytes,
+                ),
             )
 
-        async def _is_pair_expired(self, pair: Pair, cursor: anysqlite.Cursor) -> bool:
+        async def _is_pair_expired(self, pair: Entry, cursor: anysqlite.Cursor) -> bool:
             """
             Check if the pair is expired.
             """
@@ -307,10 +277,10 @@ try:
             self,
         ) -> None:
             """
-            Cleanup expired pairs in the database.
+            Cleanup expired entries in the database.
             """
-            should_mark_as_deleted: List[Union[CompletePair, IncompletePair]] = []
-            should_hard_delete: List[Union[CompletePair, IncompletePair]] = []
+            should_mark_as_deleted: List[Entry] = []
+            should_hard_delete: List[Entry] = []
 
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
@@ -319,7 +289,10 @@ try:
             chunk_size = BATCH_CLEANUP_CHUNK_SIZE
             offset = 0
             while True:
-                await cursor.execute("SELECT id, data FROM entries LIMIT ? OFFSET ?", (chunk_size, offset))
+                await cursor.execute(
+                    "SELECT id, data FROM entries LIMIT ? OFFSET ?",
+                    (chunk_size, offset),
+                )
                 rows = await cursor.fetchall()
                 if not rows:
                     break
@@ -350,61 +323,56 @@ try:
 
             await connection.commit()
 
-        async def _is_corrupted(self, pair: IncompletePair | CompletePair, cursor: anysqlite.Cursor) -> bool:
-            # if pair was created more than 1 hour ago and still not completed
-            if pair.meta.created_at + 3600 < time.time() and isinstance(pair, IncompletePair):
+        async def _is_corrupted(self, pair: Entry, cursor: anysqlite.Cursor) -> bool:
+            # if entry was created more than 1 hour ago and still has no response (incomplete)
+            if pair.meta.created_at + 3600 < time.time() and pair.response is None:
                 return True
 
-            if isinstance(pair, CompletePair) and not await self._is_stream_complete("request", pair.id, cursor):
+            # Check if response stream is complete for Entry with response
+            if (
+                isinstance(pair, Entry)
+                and pair.response is not None
+                and not await self._is_stream_complete(pair.id, cursor)
+            ):
                 return True
             return False
 
-        async def _hard_delete_pair(self, pair: CompletePair | IncompletePair, cursor: anysqlite.Cursor) -> None:
+        async def _hard_delete_pair(self, pair: Entry, cursor: anysqlite.Cursor) -> None:
             """
             Permanently delete the pair from the database.
             """
             await cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))
 
-            # Delete all streams (both request and response) for this entry
+            # Delete response stream for this entry
             await self._delete_stream(pair.id.bytes, cursor)
 
         async def _delete_stream(
             self,
             entry_id: bytes,
             cursor: anysqlite.Cursor,
-            type: Literal["request", "response", "all"] = "all",
         ) -> None:
             """
-            Delete all streams (both request and response) associated with the given entry ID.
+            Delete response stream associated with the given entry ID.
             """
-            if type == "request":
-                await cursor.execute(
-                    "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["request"])
-                )
-            elif type == "response":
-                await cursor.execute(
-                    "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["response"])
-                )
-            elif type == "all":
-                await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
+            await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
 
         async def _save_stream(
             self,
             stream: AsyncIterator[bytes],
             entry_id: bytes,
-            kind: Literal["response", "request"],
         ) -> AsyncIterator[bytes]:
             """
-            Wrapper around an async iterator that also saves the data to the cache in chunks.
+            Wrapper around an async iterator that also saves the response data to the cache in chunks.
             """
-            kind_id = self._STREAM_KIND[kind]
             chunk_number = 0
+            content_length = 0
             async for chunk in stream:
+                content_length += len(chunk)
                 connection = await self._ensure_connection()
                 cursor = await connection.cursor()
                 await cursor.execute(
-                    "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
-                    (entry_id, kind_id, chunk_number, chunk),
+                    "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                    (entry_id, chunk_number, chunk),
                 )
                 await connection.commit()
                 chunk_number += 1
@@ -414,28 +382,26 @@ try:
             connection = await self._ensure_connection()
             cursor = await connection.cursor()
             await cursor.execute(
-                "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
-                (entry_id, kind_id, self._COMPLETE_CHUNK_NUMBER, b""),
+                "INSERT INTO streams (entry_id, chunk_number, chunk_data) VALUES (?, ?, ?)",
+                (entry_id, self._COMPLETE_CHUNK_NUMBER, b""),
             )
             await connection.commit()
 
         async def _stream_data_from_cache(
             self,
             entry_id: bytes,
-            kind: Literal["response", "request"],
         ) -> AsyncIterator[bytes]:
             """
-            Get an async iterator that yields the stream data from the cache.
+            Get an async iterator that yields the response stream data from the cache.
             """
-            kind_id = self._STREAM_KIND[kind]
             chunk_number = 0
 
             connection = await self._ensure_connection()
             while True:
                 cursor = await connection.cursor()
                 await cursor.execute(
-                    "SELECT chunk_data FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ?",
-                    (entry_id, kind_id, chunk_number),
+                    "SELECT chunk_data FROM streams WHERE entry_id = ? AND chunk_number = ?",
+                    (entry_id, chunk_number),
                 )
                 result = await cursor.fetchone()
 
@@ -449,7 +415,7 @@ try:
                 chunk_number += 1
 except ImportError:
 
-    class AsyncSqliteStorage(AsyncBaseStorage):  # type: ignore[no-redef]
+    class AsyncSqliteStorage:  # type: ignore[no-redef]
         def __init__(self, *args: Any, **kwargs: Any) -> None:
             raise ImportError(
                 "The 'anysqlite' library is required to use the `AsyncSqliteStorage` integration. "
New file (entry serialization helpers):

@@ -0,0 +1,144 @@
+from __future__ import annotations
+
+import uuid
+from typing import TYPE_CHECKING, Any, Mapping, Optional, Union, overload
+
+import msgpack
+from typing_extensions import Literal, cast
+
+from hishel._core._headers import Headers
+from hishel._core.models import EntryMeta, Request, Response
+
+
+def filter_out_hishel_metadata(data: Mapping[str, Any]) -> dict[str, Any]:
+    return {k: v for k, v in data.items() if not k.startswith("hishel_")}
+
+
+if TYPE_CHECKING:
+    from hishel import Entry
+
+
+@overload
+def pack(
+    value: "Entry",
+    /,
+    kind: Literal["pair"],
+) -> bytes: ...
+
+
+@overload
+def pack(
+    value: uuid.UUID,
+    /,
+    kind: Literal["entry_db_key_index"],
+) -> bytes: ...
+
+
+def pack(
+    value: Union["Entry", uuid.UUID],
+    /,
+    kind: Literal["pair", "entry_db_key_index"],
+) -> bytes:
+    from hishel import Entry
+
+    if kind == "entry_db_key_index":
+        assert isinstance(value, uuid.UUID)
+        return value.bytes
+    elif kind == "pair":
+        assert isinstance(value, Entry)
+        return cast(
+            bytes,
+            msgpack.packb(
+                {
+                    "id": value.id.bytes,
+                    "request": {
+                        "method": value.request.method,
+                        "url": value.request.url,
+                        "headers": value.request.headers._headers,
+                        "extra": filter_out_hishel_metadata(value.request.metadata),
+                    },
+                    "response": {
+                        "status_code": value.response.status_code,
+                        "headers": value.response.headers._headers,
+                        "extra": filter_out_hishel_metadata(value.response.metadata),
+                    },
+                    "meta": {
+                        "created_at": value.meta.created_at,
+                        "deleted_at": value.meta.deleted_at,
+                    },
+                    "cache_key": value.cache_key,
+                }
+            ),
+        )
+    assert False, f"Unexpected kind: {kind}"
+
+
+@overload
+def unpack(
+    value: bytes,
+    /,
+    kind: Literal["pair"],
+) -> "Entry": ...
+
+
+@overload
+def unpack(
+    value: bytes,
+    /,
+    kind: Literal["entry_db_key_index"],
+) -> uuid.UUID: ...
+
+
+@overload
+def unpack(
+    value: Optional[bytes],
+    /,
+    kind: Literal["pair"],
+) -> Optional["Entry"]: ...
+
+
+@overload
+def unpack(
+    value: Optional[bytes],
+    /,
+    kind: Literal["entry_db_key_index"],
+) -> Optional[uuid.UUID]: ...
+
+
+def unpack(
+    value: Optional[bytes],
+    /,
+    kind: Literal["pair", "entry_db_key_index"],
+) -> Union["Entry", uuid.UUID, None]:
+    from hishel import Entry
+
+    if value is None:
+        return None
+    if kind == "entry_db_key_index":
+        return uuid.UUID(bytes=value)
+    elif kind == "pair":
+        data = msgpack.unpackb(value)
+        id = uuid.UUID(bytes=data["id"])
+        return Entry(
+            id=id,
+            request=Request(
+                method=data["request"]["method"],
+                url=data["request"]["url"],
+                headers=Headers(data["request"]["headers"]),
+                metadata=data["request"]["extra"],
+                stream=iter([]),
+            ),
+            response=(
+                Response(
+                    status_code=data["response"]["status_code"],
+                    headers=Headers(data["response"]["headers"]),
+                    metadata=data["response"]["extra"],
+                    stream=iter([]),
+                )
+            ),
+            meta=EntryMeta(
+                created_at=data["meta"]["created_at"],
+                deleted_at=data["meta"]["deleted_at"],
+            ),
+            cache_key=data["cache_key"],
+        )
New file (synchronous storage base class):

@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+import abc
+import time
+import typing as tp
+import uuid
+
+from ..models import Entry, Request, Response
+
+
+class SyncBaseStorage(abc.ABC):
+    @abc.abstractmethod
+    def create_entry(self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None) -> Entry:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def get_entries(self, key: str) -> tp.List[Entry]:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def update_entry(
+        self,
+        id: uuid.UUID,
+        new_entry: tp.Union[Entry, tp.Callable[[Entry], Entry]],
+    ) -> tp.Optional[Entry]:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def remove_entry(self, id: uuid.UUID) -> None:
+        raise NotImplementedError()
+
+    def close(self) -> None:
+        pass
+
+    def is_soft_deleted(self, pair: Entry) -> bool:
+        """
+        Check if a pair is soft deleted based on its metadata.
+
+        Args:
+            pair: The request pair to check.
+
+        Returns:
+            True if the pair is soft deleted, False otherwise.
+        """
+        return pair.meta.deleted_at is not None and pair.meta.deleted_at > 0
+
+    def is_safe_to_hard_delete(self, pair: Entry) -> bool:
+        """
+        Check if a pair is safe to hard delete based on its metadata.
+
+        If the pair has been soft deleted for more than 1 hour, it is considered safe to hard delete.
+
+        Args:
+            pair: The request pair to check.
+
+        Returns:
+            True if the pair is safe to hard delete, False otherwise.
+        """
+        return bool(pair.meta.deleted_at is not None and (pair.meta.deleted_at + 3600 < time.time()))
+
+    def mark_pair_as_deleted(self, pair: Entry) -> Entry:
+        """
+        Mark a pair as soft deleted by setting its deleted_at timestamp.
+
+        Args:
+            pair: The request pair to mark as deleted.
+        Returns:
+            The updated request pair with the deleted_at timestamp set.
+        """
+        pair.meta.deleted_at = time.time()
+        return pair
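
For orientation, here is what a minimal concrete subclass of `SyncBaseStorage` could look like. This `InMemoryStorage` is purely illustrative (not part of hishel) and assumes only the `Entry`/`EntryMeta` fields visible elsewhere in this diff:

    from __future__ import annotations

    import time
    import typing as tp
    import uuid

    from hishel._core.models import Entry, EntryMeta, Request, Response


    class InMemoryStorage(SyncBaseStorage):
        def __init__(self) -> None:
            self._entries: tp.Dict[uuid.UUID, Entry] = {}

        def create_entry(self, request: Request, response: Response, key: str, id_: uuid.UUID | None = None) -> Entry:
            entry = Entry(
                id=id_ if id_ is not None else uuid.uuid4(),
                request=request,
                response=response,
                meta=EntryMeta(created_at=time.time()),
                cache_key=key.encode("utf-8"),  # stored as bytes, matching the sqlite backend
            )
            self._entries[entry.id] = entry
            return entry

        def get_entries(self, key: str) -> tp.List[Entry]:
            key_bytes = key.encode("utf-8")
            return [
                e for e in self._entries.values()
                if e.cache_key == key_bytes and not self.is_soft_deleted(e)
            ]

        def update_entry(
            self,
            id: uuid.UUID,
            new_entry: tp.Union[Entry, tp.Callable[[Entry], Entry]],
        ) -> tp.Optional[Entry]:
            current = self._entries.get(id)
            if current is None:
                return None
            updated = new_entry if isinstance(new_entry, Entry) else new_entry(current)
            self._entries[id] = updated
            return updated

        def remove_entry(self, id: uuid.UUID) -> None:
            entry = self._entries.get(id)
            if entry is not None:
                # Soft-delete first; is_safe_to_hard_delete() gates real removal.
                self._entries[id] = self.mark_pair_as_deleted(entry)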