hishel 0.1.5__py3-none-any.whl → 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. hishel/__init__.py +59 -52
  2. hishel/_async_cache.py +213 -0
  3. hishel/_async_httpx.py +236 -0
  4. hishel/{beta/_core → _core}/_headers.py +11 -1
  5. hishel/{beta/_core → _core}/_spec.py +270 -136
  6. hishel/_core/_storages/_async_base.py +71 -0
  7. hishel/_core/_storages/_async_sqlite.py +420 -0
  8. hishel/_core/_storages/_packing.py +144 -0
  9. hishel/_core/_storages/_sync_base.py +71 -0
  10. hishel/_core/_storages/_sync_sqlite.py +420 -0
  11. hishel/{beta/_core → _core}/models.py +100 -37
  12. hishel/_policies.py +49 -0
  13. hishel/_sync_cache.py +213 -0
  14. hishel/_sync_httpx.py +236 -0
  15. hishel/_utils.py +37 -366
  16. hishel/asgi.py +400 -0
  17. hishel/fastapi.py +263 -0
  18. hishel/httpx.py +12 -0
  19. hishel/{beta/requests.py → requests.py} +31 -25
  20. hishel-1.0.0b1.dist-info/METADATA +509 -0
  21. hishel-1.0.0b1.dist-info/RECORD +24 -0
  22. hishel/_async/__init__.py +0 -5
  23. hishel/_async/_client.py +0 -30
  24. hishel/_async/_mock.py +0 -43
  25. hishel/_async/_pool.py +0 -201
  26. hishel/_async/_storages.py +0 -768
  27. hishel/_async/_transports.py +0 -282
  28. hishel/_controller.py +0 -581
  29. hishel/_exceptions.py +0 -10
  30. hishel/_files.py +0 -54
  31. hishel/_headers.py +0 -215
  32. hishel/_lfu_cache.py +0 -71
  33. hishel/_lmdb_types_.pyi +0 -53
  34. hishel/_s3.py +0 -122
  35. hishel/_serializers.py +0 -329
  36. hishel/_sync/__init__.py +0 -5
  37. hishel/_sync/_client.py +0 -30
  38. hishel/_sync/_mock.py +0 -43
  39. hishel/_sync/_pool.py +0 -201
  40. hishel/_sync/_storages.py +0 -768
  41. hishel/_sync/_transports.py +0 -282
  42. hishel/_synchronization.py +0 -37
  43. hishel/beta/__init__.py +0 -59
  44. hishel/beta/_async_cache.py +0 -167
  45. hishel/beta/_core/__init__.py +0 -0
  46. hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
  47. hishel/beta/_core/_base/_storages/_base.py +0 -272
  48. hishel/beta/_core/_base/_storages/_packing.py +0 -165
  49. hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
  50. hishel/beta/_sync_cache.py +0 -167
  51. hishel/beta/httpx.py +0 -328
  52. hishel-0.1.5.dist-info/METADATA +0 -258
  53. hishel-0.1.5.dist-info/RECORD +0 -41
  54. {hishel-0.1.5.dist-info → hishel-1.0.0b1.dist-info}/WHEEL +0 -0
  55. {hishel-0.1.5.dist-info → hishel-1.0.0b1.dist-info}/licenses/LICENSE +0 -0
@@ -1,411 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import time
4
- import uuid
5
- from dataclasses import replace
6
- from typing import (
7
- AsyncIterable,
8
- AsyncIterator,
9
- Callable,
10
- List,
11
- Literal,
12
- Optional,
13
- Union,
14
- )
15
-
16
- import anysqlite
17
-
18
- from hishel.beta._core._base._storages._base import AsyncBaseStorage, ensure_cache_dict
19
- from hishel.beta._core._base._storages._packing import pack, unpack
20
- from hishel.beta._core.models import (
21
- CompletePair,
22
- IncompletePair,
23
- Pair,
24
- PairMeta,
25
- Request,
26
- Response,
27
- )
28
-
29
-
30
- class AsyncSqliteStorage(AsyncBaseStorage):
31
- _STREAM_KIND = {"request": 0, "response": 1}
32
- _COMPLETE_CHUNK_NUMBER = -1
33
-
34
- def __init__(
35
- self,
36
- *,
37
- connection: Optional[anysqlite.Connection] = None,
38
- database_path: str = "hishel_cache.db",
39
- default_ttl: Optional[float] = None,
40
- refresh_ttl_on_access: bool = True,
41
- ) -> None:
42
- base_path = ensure_cache_dict()
43
-
44
- self.connection = connection
45
- self.database_path = base_path / database_path
46
- self.default_ttl = default_ttl
47
- self.refresh_ttl_on_access = refresh_ttl_on_access
48
- self.last_cleanup = float("-inf")
49
- self._initialized = False
50
-
51
- async def _ensure_connection(self) -> anysqlite.Connection:
52
- """Ensure connection is established and database is initialized."""
53
- if self.connection is None:
54
- self.connection = await anysqlite.connect(str(self.database_path))
55
- if not self._initialized:
56
- await self._initialize_database()
57
- self._initialized = True
58
- return self.connection
59
-
60
- async def _initialize_database(self) -> None:
61
- """Initialize the database schema."""
62
- assert self.connection is not None
63
- cursor = await self.connection.cursor()
64
-
65
- # Table for storing request/response pairs
66
- await cursor.execute("""
67
- CREATE TABLE IF NOT EXISTS entries (
68
- id BLOB PRIMARY KEY,
69
- cache_key BLOB,
70
- data BLOB NOT NULL,
71
- created_at REAL NOT NULL,
72
- deleted_at REAL
73
- )
74
- """)
75
-
76
- # Table for storing stream chunks
77
- await cursor.execute("""
78
- CREATE TABLE IF NOT EXISTS streams (
79
- entry_id BLOB NOT NULL,
80
- kind INTEGER NOT NULL,
81
- chunk_number INTEGER NOT NULL,
82
- chunk_data BLOB NOT NULL,
83
- PRIMARY KEY (entry_id, kind, chunk_number),
84
- FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
85
- )
86
- """)
87
-
88
- # Indexes for performance
89
- await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
90
- await cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
91
- # Note: PRIMARY KEY (entry_id, kind, chunk_number) already provides an index
92
- # for queries like: entry_id = ? AND kind = ? AND chunk_number = ?
93
-
94
- await self.connection.commit()
95
-
96
- async def create_pair(
97
- self,
98
- request: Request,
99
- id: uuid.UUID | None = None,
100
- ) -> IncompletePair:
101
- pair_id = id if id is not None else uuid.uuid4()
102
- pair_meta = PairMeta(
103
- created_at=time.time(),
104
- )
105
-
106
- pair = IncompletePair(id=pair_id, request=request, meta=pair_meta)
107
-
108
- packed_pair = pack(pair, kind="pair")
109
-
110
- connection = await self._ensure_connection()
111
- cursor = await connection.cursor()
112
- await cursor.execute(
113
- "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
114
- (pair_id.bytes, None, packed_pair, pair_meta.created_at, None),
115
- )
116
- await connection.commit()
117
-
118
- assert isinstance(request.stream, AsyncIterable), "Request stream must be an AsyncIterable, not Iterable"
119
-
120
- request = Request(
121
- method=request.method,
122
- url=request.url,
123
- headers=request.headers,
124
- metadata=request.metadata,
125
- stream=self._save_stream(request.stream, pair_id.bytes, "request"),
126
- )
127
-
128
- return replace(pair, request=request)
129
-
130
- async def add_response(
131
- self,
132
- pair_id: uuid.UUID,
133
- response: Response,
134
- key: str | bytes,
135
- ) -> CompletePair:
136
- if isinstance(key, str):
137
- key = key.encode("utf-8")
138
-
139
- connection = await self._ensure_connection()
140
- cursor = await connection.cursor()
141
-
142
- # Get the existing pair
143
- await cursor.execute("SELECT data FROM entries WHERE id = ?", (pair_id.bytes,))
144
- result = await cursor.fetchone()
145
-
146
- if result is None:
147
- raise ValueError(f"Entry with ID {pair_id} not found.")
148
-
149
- pair = unpack(result[0], kind="pair")
150
-
151
- assert isinstance(response.stream, (AsyncIterator, AsyncIterable))
152
- response = replace(response, stream=self._save_stream(response.stream, pair_id.bytes, "response"))
153
-
154
- await self._delete_stream(pair.id.bytes, cursor, type="response")
155
- complete_pair = CompletePair(id=pair.id, request=pair.request, response=response, meta=pair.meta, cache_key=key)
156
-
157
- # Update the entry with the complete pair and set cache_key
158
- await cursor.execute(
159
- "UPDATE entries SET data = ?, cache_key = ? WHERE id = ?",
160
- (pack(complete_pair, kind="pair"), key, pair_id.bytes),
161
- )
162
- await connection.commit()
163
-
164
- return complete_pair
165
-
166
- async def get_pairs(self, key: str) -> List[CompletePair]:
167
- final_pairs: List[CompletePair] = []
168
-
169
- connection = await self._ensure_connection()
170
- cursor = await connection.cursor()
171
- # Query entries directly by cache_key
172
- await cursor.execute("SELECT id, data FROM entries WHERE cache_key = ?", (key.encode("utf-8"),))
173
-
174
- for row in await cursor.fetchall():
175
- pair_data = unpack(row[1], kind="pair")
176
-
177
- if isinstance(pair_data, IncompletePair):
178
- continue
179
-
180
- final_pairs.append(pair_data)
181
-
182
- pairs_with_streams: List[CompletePair] = []
183
-
184
- for pair in final_pairs:
185
- pairs_with_streams.append(
186
- replace(
187
- pair,
188
- response=replace(
189
- pair.response,
190
- stream=self._stream_data_from_cache(pair.id.bytes, "response"),
191
- ),
192
- request=replace(
193
- pair.request,
194
- stream=self._stream_data_from_cache(pair.id.bytes, "request"),
195
- ),
196
- )
197
- )
198
- return pairs_with_streams
199
-
200
- async def update_pair(
201
- self,
202
- id: uuid.UUID,
203
- new_pair: Union[CompletePair, Callable[[CompletePair], CompletePair]],
204
- ) -> Optional[CompletePair]:
205
- connection = await self._ensure_connection()
206
- cursor = await connection.cursor()
207
- await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
208
- result = await cursor.fetchone()
209
-
210
- if result is None:
211
- return None
212
-
213
- pair = unpack(result[0], kind="pair")
214
-
215
- if isinstance(pair, IncompletePair):
216
- return None
217
-
218
- if isinstance(new_pair, CompletePair):
219
- complete_pair = new_pair
220
- else:
221
- complete_pair = new_pair(pair)
222
-
223
- if pair.id != complete_pair.id:
224
- raise ValueError("Pair ID mismatch")
225
-
226
- await cursor.execute("UPDATE entries SET data = ? WHERE id = ?", (pack(complete_pair, kind="pair"), id.bytes))
227
-
228
- if pair.cache_key != complete_pair.cache_key:
229
- await cursor.execute(
230
- "UPDATE entries SET cache_key = ? WHERE id = ?",
231
- (complete_pair.cache_key, complete_pair.id.bytes),
232
- )
233
-
234
- await connection.commit()
235
-
236
- return complete_pair
237
-
238
- async def remove(self, id: uuid.UUID) -> None:
239
- connection = await self._ensure_connection()
240
- cursor = await connection.cursor()
241
- await cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
242
- result = await cursor.fetchone()
243
-
244
- if result is None:
245
- return None
246
-
247
- pair = unpack(result[0], kind="pair")
248
- await self._soft_delete_pair(pair, cursor)
249
- await connection.commit()
250
-
251
- async def _is_stream_complete(
252
- self, kind: Literal["request", "response"], pair_id: uuid.UUID, cursor: anysqlite.Cursor
253
- ) -> bool:
254
- kind_id = self._STREAM_KIND[kind]
255
- # Check if there's a completion marker (chunk_number = -1)
256
- await cursor.execute(
257
- "SELECT 1 FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ? LIMIT 1",
258
- (pair_id.bytes, kind_id, self._COMPLETE_CHUNK_NUMBER),
259
- )
260
- return await cursor.fetchone() is not None
261
-
262
- async def _soft_delete_pair(self, pair: Union[CompletePair, IncompletePair], cursor: anysqlite.Cursor) -> None:
263
- """
264
- Mark the pair as deleted by setting the deleted_at timestamp.
265
- """
266
- marked_pair = self.mark_pair_as_deleted(pair)
267
- await cursor.execute(
268
- "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
269
- (pack(marked_pair, kind="pair"), marked_pair.meta.deleted_at, pair.id.bytes),
270
- )
271
-
272
- async def _is_pair_expired(self, pair: Pair, cursor: anysqlite.Cursor) -> bool:
273
- """
274
- Check if the pair is expired.
275
- """
276
- ttl = pair.request.metadata["hishel_ttl"] if "hishel_ttl" in pair.request.metadata else self.default_ttl
277
- created_at = pair.meta.created_at
278
- if ttl is None:
279
- return False
280
- return created_at + ttl < time.time()
281
-
282
- async def _batch_cleanup(
283
- self,
284
- ) -> None:
285
- """
286
- Cleanup expired pairs in the database.
287
- """
288
- should_mark_as_deleted: List[Union[CompletePair, IncompletePair]] = []
289
- should_hard_delete: List[Union[CompletePair, IncompletePair]] = []
290
-
291
- connection = await self._ensure_connection()
292
- cursor = await connection.cursor()
293
- await cursor.execute("SELECT id, data FROM entries")
294
-
295
- for row in await cursor.fetchall():
296
- pair = unpack(row[1], kind="pair")
297
- if pair is None:
298
- continue
299
- if await self._is_pair_expired(pair, cursor) and not self.is_soft_deleted(pair):
300
- should_mark_as_deleted.append(pair)
301
-
302
- if (self.is_soft_deleted(pair) and self.is_safe_to_hard_delete(pair)) or await self._is_corrupted(
303
- pair, cursor
304
- ):
305
- should_hard_delete.append(pair)
306
-
307
- for pair in should_mark_as_deleted:
308
- await self._soft_delete_pair(pair, cursor)
309
-
310
- for pair in should_hard_delete:
311
- await self._hard_delete_pair(pair, cursor)
312
-
313
- await connection.commit()
314
-
315
- async def _is_corrupted(self, pair: IncompletePair | CompletePair, cursor: anysqlite.Cursor) -> bool:
316
- # if pair was created more than 1 hour ago and still not completed
317
- if pair.meta.created_at + 3600 < time.time() and isinstance(pair, IncompletePair):
318
- return True
319
-
320
- if isinstance(pair, CompletePair) and not await self._is_stream_complete("request", pair.id, cursor):
321
- return True
322
- return False
323
-
324
- async def _hard_delete_pair(self, pair: CompletePair | IncompletePair, cursor: anysqlite.Cursor) -> None:
325
- """
326
- Permanently delete the pair from the database.
327
- """
328
- await cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))
329
-
330
- # Delete all streams (both request and response) for this entry
331
- await self._delete_stream(pair.id.bytes, cursor)
332
-
333
- async def _delete_stream(
334
- self,
335
- entry_id: bytes,
336
- cursor: anysqlite.Cursor,
337
- type: Literal["request", "response", "all"] = "all",
338
- ) -> None:
339
- """
340
- Delete all streams (both request and response) associated with the given entry ID.
341
- """
342
- if type == "request":
343
- await cursor.execute(
344
- "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["request"])
345
- )
346
- elif type == "response":
347
- await cursor.execute(
348
- "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["response"])
349
- )
350
- elif type == "all":
351
- await cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
352
-
353
- async def _save_stream(
354
- self,
355
- stream: AsyncIterator[bytes],
356
- entry_id: bytes,
357
- kind: Literal["response", "request"],
358
- ) -> AsyncIterator[bytes]:
359
- """
360
- Wrapper around an async iterator that also saves the data to the cache in chunks.
361
- """
362
- kind_id = self._STREAM_KIND[kind]
363
- chunk_number = 0
364
- async for chunk in stream:
365
- connection = await self._ensure_connection()
366
- cursor = await connection.cursor()
367
- await cursor.execute(
368
- "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
369
- (entry_id, kind_id, chunk_number, chunk),
370
- )
371
- await connection.commit()
372
- chunk_number += 1
373
- yield chunk
374
-
375
- # Mark end of stream with chunk_number = -1
376
- connection = await self._ensure_connection()
377
- cursor = await connection.cursor()
378
- await cursor.execute(
379
- "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
380
- (entry_id, kind_id, self._COMPLETE_CHUNK_NUMBER, b""),
381
- )
382
- await connection.commit()
383
-
384
- async def _stream_data_from_cache(
385
- self,
386
- entry_id: bytes,
387
- kind: Literal["response", "request"],
388
- ) -> AsyncIterator[bytes]:
389
- """
390
- Get an async iterator that yields the stream data from the cache.
391
- """
392
- kind_id = self._STREAM_KIND[kind]
393
- chunk_number = 0
394
-
395
- connection = await self._ensure_connection()
396
- while True:
397
- cursor = await connection.cursor()
398
- await cursor.execute(
399
- "SELECT chunk_data FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ?",
400
- (entry_id, kind_id, chunk_number),
401
- )
402
- result = await cursor.fetchone()
403
-
404
- if result is None:
405
- break
406
- chunk = result[0]
407
- # chunk_number = -1 is the completion marker with empty data
408
- if chunk == b"":
409
- break
410
- yield chunk
411
- chunk_number += 1
@@ -1,272 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import abc
4
- import time
5
- import typing as tp
6
- import uuid
7
- from abc import ABC
8
- from pathlib import Path
9
-
10
- from hishel.beta._core.models import CompletePair, IncompletePair, Request, Response
11
-
12
-
13
- class SyncBaseStorage(ABC):
14
- @abc.abstractmethod
15
- def create_pair(
16
- self,
17
- request: Request,
18
- id: uuid.UUID | None = None,
19
- ) -> IncompletePair:
20
- """
21
- Store a request in the backend under the given key.
22
-
23
- Args:
24
- request: The request object to store.
25
-
26
- Returns:
27
- The created IncompletePair object representing the stored request.
28
-
29
- Raises:
30
- NotImplementedError: Must be implemented in subclasses.
31
- """
32
- raise NotImplementedError()
33
-
34
- @abc.abstractmethod
35
- def add_response(self, pair_id: uuid.UUID, response: Response, key: str | bytes) -> CompletePair:
36
- """
37
- Add a response to an existing request pair.
38
-
39
- Args:
40
- pair_id: The unique identifier of the request pair.
41
- response: The response object to add.
42
- key: The cache key associated with the request pair.
43
-
44
- Returns:
45
- The updated response object.
46
-
47
- Raises:
48
- NotImplementedError: Must be implemented in subclasses.
49
- """
50
- raise NotImplementedError()
51
-
52
- @abc.abstractmethod
53
- def get_pairs(self, key: str) -> tp.List[CompletePair]:
54
- """
55
- Retrieve all responses associated with a given key.
56
-
57
- Args:
58
- key: The unique identifier for the request pairs.
59
- complete_only: If True, only return pairs with responses. If False,
60
- only return pairs without responses. If None, return all pairs.
61
- """
62
- raise NotImplementedError()
63
-
64
- @abc.abstractmethod
65
- def update_pair(
66
- self,
67
- id: uuid.UUID,
68
- new_pair: tp.Union[CompletePair, tp.Callable[[CompletePair], CompletePair]],
69
- ) -> tp.Optional[CompletePair]:
70
- """
71
- Update an existing request pair.
72
-
73
- Args:
74
- id: The unique identifier of the request pair to update.
75
- new_pair: The new pair data or a callable that takes the current pair
76
- and returns the updated pair.
77
- """
78
- raise NotImplementedError()
79
-
80
- @abc.abstractmethod
81
- def remove(self, id: uuid.UUID) -> None:
82
- """
83
- Remove a request pair from the storage.
84
-
85
- Args:
86
- id: The unique identifier of the request pair to remove.
87
- """
88
- raise NotImplementedError()
89
-
90
- def close(self) -> None:
91
- """
92
- Close any resources held by the storage backend.
93
- """
94
- pass
95
-
96
- def is_soft_deleted(self, pair: IncompletePair | CompletePair) -> bool:
97
- """
98
- Check if a pair is soft deleted based on its metadata.
99
-
100
- Args:
101
- pair: The request pair to check.
102
-
103
- Returns:
104
- True if the pair is soft deleted, False otherwise.
105
- """
106
- return pair.meta.deleted_at is not None and pair.meta.deleted_at > 0
107
-
108
- def is_safe_to_hard_delete(self, pair: IncompletePair | CompletePair) -> bool:
109
- """
110
- Check if a pair is safe to hard delete based on its metadata.
111
-
112
- If the pair has been soft deleted for more than 1 hour, it is considered safe to hard delete.
113
-
114
- Args:
115
- pair: The request pair to check.
116
-
117
- Returns:
118
- True if the pair is safe to hard delete, False otherwise.
119
- """
120
- return bool(pair.meta.deleted_at is not None and (pair.meta.deleted_at + 3600 < time.time()))
121
-
122
- @tp.overload
123
- def mark_pair_as_deleted(self, pair: CompletePair) -> CompletePair: ...
124
- @tp.overload
125
- def mark_pair_as_deleted(self, pair: IncompletePair) -> IncompletePair: ...
126
- def mark_pair_as_deleted(self, pair: CompletePair | IncompletePair) -> CompletePair | IncompletePair:
127
- """
128
- Mark a pair as soft deleted by setting its deleted_at timestamp.
129
-
130
- Args:
131
- pair: The request pair to mark as deleted.
132
- Returns:
133
- The updated request pair with the deleted_at timestamp set.
134
- """
135
- pair.meta.deleted_at = time.time()
136
- return pair
137
-
138
-
139
- class AsyncBaseStorage(ABC):
140
- @abc.abstractmethod
141
- async def create_pair(
142
- self,
143
- request: Request,
144
- id: uuid.UUID | None = None,
145
- ) -> IncompletePair:
146
- """
147
- Store a request in the backend under the given key.
148
-
149
- Args:
150
- request: The request object to store.
151
-
152
- Returns:
153
- The created IncompletePair object representing the stored request.
154
-
155
- Raises:
156
- NotImplementedError: Must be implemented in subclasses.
157
- """
158
- raise NotImplementedError()
159
-
160
- @abc.abstractmethod
161
- async def add_response(self, pair_id: uuid.UUID, response: Response, key: str | bytes) -> CompletePair:
162
- """
163
- Add a response to an existing request pair.
164
-
165
- Args:
166
- pair_id: The unique identifier of the request pair.
167
- response: The response object to add.
168
- key: The cache key associated with the request pair.
169
-
170
- Returns:
171
- The updated response object.
172
-
173
- Raises:
174
- NotImplementedError: Must be implemented in subclasses.
175
- """
176
- raise NotImplementedError()
177
-
178
- @abc.abstractmethod
179
- async def get_pairs(self, key: str) -> tp.List[CompletePair]:
180
- """
181
- Retrieve all responses associated with a given key.
182
-
183
- Args:
184
- key: The unique identifier for the request pairs.
185
- """
186
- raise NotImplementedError()
187
-
188
- @abc.abstractmethod
189
- async def update_pair(
190
- self,
191
- id: uuid.UUID,
192
- new_pair: tp.Union[CompletePair, tp.Callable[[CompletePair], CompletePair]],
193
- ) -> tp.Optional[CompletePair]:
194
- """
195
- Update an existing request pair.
196
-
197
- Args:
198
- id: The unique identifier of the request pair to update.
199
- new_pair: The new pair data or a callable that takes the current pair
200
- and returns the updated pair.
201
- """
202
- raise NotImplementedError()
203
-
204
- @abc.abstractmethod
205
- async def remove(self, id: uuid.UUID) -> None:
206
- """
207
- Remove a request pair from the storage.
208
-
209
- Args:
210
- id: The unique identifier of the request pair to remove.
211
- """
212
- raise NotImplementedError()
213
-
214
- async def close(self) -> None:
215
- """
216
- Close any resources held by the storage backend.
217
- """
218
- pass
219
-
220
- def is_soft_deleted(self, pair: IncompletePair | CompletePair) -> bool:
221
- """
222
- Check if a pair is soft deleted based on its metadata.
223
-
224
- Args:
225
- pair: The request pair to check.
226
-
227
- Returns:
228
- True if the pair is soft deleted, False otherwise.
229
- """
230
- return pair.meta.deleted_at is not None and pair.meta.deleted_at > 0
231
-
232
- def is_safe_to_hard_delete(self, pair: IncompletePair | CompletePair) -> bool:
233
- """
234
- Check if a pair is safe to hard delete based on its metadata.
235
-
236
- If the pair has been soft deleted for more than 1 hour, it is considered safe to hard delete.
237
-
238
- Args:
239
- pair: The request pair to check.
240
-
241
- Returns:
242
- True if the pair is safe to hard delete, False otherwise.
243
- """
244
- return bool(pair.meta.deleted_at is not None and (pair.meta.deleted_at + 3600 < time.time()))
245
-
246
- @tp.overload
247
- def mark_pair_as_deleted(self, pair: CompletePair) -> CompletePair: ...
248
- @tp.overload
249
- def mark_pair_as_deleted(self, pair: IncompletePair) -> IncompletePair: ...
250
- def mark_pair_as_deleted(self, pair: CompletePair | IncompletePair) -> CompletePair | IncompletePair:
251
- """
252
- Mark a pair as soft deleted by setting its deleted_at timestamp.
253
-
254
- Args:
255
- pair: The request pair to mark as deleted.
256
- Returns:
257
- The updated request pair with the deleted_at timestamp set.
258
- """
259
- pair.meta.deleted_at = time.time()
260
- return pair
261
-
262
-
263
- def ensure_cache_dict(base_path: str | None = None) -> Path:
264
- _base_path = Path(base_path) if base_path is not None else Path(".cache/hishel")
265
- _gitignore_file = _base_path / ".gitignore"
266
-
267
- _base_path.mkdir(parents=True, exist_ok=True)
268
-
269
- if not _gitignore_file.is_file():
270
- with open(_gitignore_file, "w", encoding="utf-8") as f:
271
- f.write("# Automatically created by Hishel\n*")
272
- return _base_path