hishel 0.1.4__py3-none-any.whl → 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. hishel/__init__.py +59 -52
  2. hishel/_async_cache.py +213 -0
  3. hishel/_async_httpx.py +236 -0
  4. hishel/_core/_headers.py +646 -0
  5. hishel/{beta/_core → _core}/_spec.py +270 -136
  6. hishel/_core/_storages/_async_base.py +71 -0
  7. hishel/_core/_storages/_async_sqlite.py +420 -0
  8. hishel/_core/_storages/_packing.py +144 -0
  9. hishel/_core/_storages/_sync_base.py +71 -0
  10. hishel/_core/_storages/_sync_sqlite.py +420 -0
  11. hishel/{beta/_core → _core}/models.py +100 -37
  12. hishel/_policies.py +49 -0
  13. hishel/_sync_cache.py +213 -0
  14. hishel/_sync_httpx.py +236 -0
  15. hishel/_utils.py +37 -366
  16. hishel/asgi.py +400 -0
  17. hishel/fastapi.py +263 -0
  18. hishel/httpx.py +12 -0
  19. hishel/{beta/requests.py → requests.py} +41 -30
  20. hishel-1.0.0b1.dist-info/METADATA +509 -0
  21. hishel-1.0.0b1.dist-info/RECORD +24 -0
  22. hishel/_async/__init__.py +0 -5
  23. hishel/_async/_client.py +0 -30
  24. hishel/_async/_mock.py +0 -43
  25. hishel/_async/_pool.py +0 -201
  26. hishel/_async/_storages.py +0 -768
  27. hishel/_async/_transports.py +0 -282
  28. hishel/_controller.py +0 -581
  29. hishel/_exceptions.py +0 -10
  30. hishel/_files.py +0 -54
  31. hishel/_headers.py +0 -215
  32. hishel/_lfu_cache.py +0 -71
  33. hishel/_lmdb_types_.pyi +0 -53
  34. hishel/_s3.py +0 -122
  35. hishel/_serializers.py +0 -329
  36. hishel/_sync/__init__.py +0 -5
  37. hishel/_sync/_client.py +0 -30
  38. hishel/_sync/_mock.py +0 -43
  39. hishel/_sync/_pool.py +0 -201
  40. hishel/_sync/_storages.py +0 -768
  41. hishel/_sync/_transports.py +0 -282
  42. hishel/_synchronization.py +0 -37
  43. hishel/beta/__init__.py +0 -59
  44. hishel/beta/_async_cache.py +0 -167
  45. hishel/beta/_core/__init__.py +0 -0
  46. hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
  47. hishel/beta/_core/_base/_storages/_base.py +0 -260
  48. hishel/beta/_core/_base/_storages/_packing.py +0 -165
  49. hishel/beta/_core/_headers.py +0 -301
  50. hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
  51. hishel/beta/_sync_cache.py +0 -167
  52. hishel/beta/httpx.py +0 -317
  53. hishel-0.1.4.dist-info/METADATA +0 -404
  54. hishel-0.1.4.dist-info/RECORD +0 -41
  55. {hishel-0.1.4.dist-info → hishel-1.0.0b1.dist-info}/WHEEL +0 -0
  56. {hishel-0.1.4.dist-info → hishel-1.0.0b1.dist-info}/licenses/LICENSE +0 -0
hishel/beta/_core/_sync/_storages/_sqlite.py (deleted)
@@ -1,411 +0,0 @@
-from __future__ import annotations
-
-import time
-import uuid
-from dataclasses import replace
-from typing import (
-    Iterable,
-    Iterator,
-    Callable,
-    List,
-    Literal,
-    Optional,
-    Union,
-)
-
-import sqlite3
-
-from hishel.beta._core._base._storages._base import SyncBaseStorage, ensure_cache_dict
-from hishel.beta._core._base._storages._packing import pack, unpack
-from hishel.beta._core.models import (
-    CompletePair,
-    IncompletePair,
-    Pair,
-    PairMeta,
-    Request,
-    Response,
-)
-
-
-class SyncSqliteStorage(SyncBaseStorage):
-    _STREAM_KIND = {"request": 0, "response": 1}
-    _COMPLETE_CHUNK_NUMBER = -1
-
-    def __init__(
-        self,
-        *,
-        connection: Optional[sqlite3.Connection] = None,
-        database_path: str = "hishel_cache.db",
-        default_ttl: Optional[float] = None,
-        refresh_ttl_on_access: bool = True,
-    ) -> None:
-        base_path = ensure_cache_dict()
-
-        self.connection = connection
-        self.database_path = base_path / database_path
-        self.default_ttl = default_ttl
-        self.refresh_ttl_on_access = refresh_ttl_on_access
-        self.last_cleanup = float("-inf")
-        self._initialized = False
-
-    def _ensure_connection(self) -> sqlite3.Connection:
-        """Ensure connection is established and database is initialized."""
-        if self.connection is None:
-            self.connection = sqlite3.connect(str(self.database_path))
-        if not self._initialized:
-            self._initialize_database()
-            self._initialized = True
-        return self.connection
-
-    def _initialize_database(self) -> None:
-        """Initialize the database schema."""
-        assert self.connection is not None
-        cursor = self.connection.cursor()
-
-        # Table for storing request/response pairs
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS entries (
-                id BLOB PRIMARY KEY,
-                cache_key BLOB,
-                data BLOB NOT NULL,
-                created_at REAL NOT NULL,
-                deleted_at REAL
-            )
-        """)
-
-        # Table for storing stream chunks
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS streams (
-                entry_id BLOB NOT NULL,
-                kind INTEGER NOT NULL,
-                chunk_number INTEGER NOT NULL,
-                chunk_data BLOB NOT NULL,
-                PRIMARY KEY (entry_id, kind, chunk_number),
-                FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
-            )
-        """)
-
-        # Indexes for performance
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
-        # Note: PRIMARY KEY (entry_id, kind, chunk_number) already provides an index
-        # for queries like: entry_id = ? AND kind = ? AND chunk_number = ?
-
-        self.connection.commit()
-
-    def create_pair(
-        self,
-        request: Request,
-        id: uuid.UUID | None = None,
-    ) -> IncompletePair:
-        pair_id = id if id is not None else uuid.uuid4()
-        pair_meta = PairMeta(
-            created_at=time.time(),
-        )
-
-        pair = IncompletePair(id=pair_id, request=request, meta=pair_meta)
-
-        packed_pair = pack(pair, kind="pair")
-
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-        cursor.execute(
-            "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
-            (pair_id.bytes, None, packed_pair, pair_meta.created_at, None),
-        )
-        connection.commit()
-
-        assert isinstance(request.stream, Iterable), "Request stream must be an Iterable, not Iterable"
-
-        request = Request(
-            method=request.method,
-            url=request.url,
-            headers=request.headers,
-            metadata=request.metadata,
-            stream=self._save_stream(request.stream, pair_id.bytes, "request"),
-        )
-
-        return replace(pair, request=request)
-
-    def add_response(
-        self,
-        pair_id: uuid.UUID,
-        response: Response,
-        key: str | bytes,
-    ) -> CompletePair:
-        if isinstance(key, str):
-            key = key.encode("utf-8")
-
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-
-        # Get the existing pair
-        cursor.execute("SELECT data FROM entries WHERE id = ?", (pair_id.bytes,))
-        result = cursor.fetchone()
-
-        if result is None:
-            raise ValueError(f"Entry with ID {pair_id} not found.")
-
-        pair = unpack(result[0], kind="pair")
-
-        assert isinstance(response.stream, (Iterator, Iterable))
-        response = replace(response, stream=self._save_stream(response.stream, pair_id.bytes, "response"))
-
-        self._delete_stream(pair.id.bytes, cursor, type="response")
-        complete_pair = CompletePair(id=pair.id, request=pair.request, response=response, meta=pair.meta, cache_key=key)
-
-        # Update the entry with the complete pair and set cache_key
-        cursor.execute(
-            "UPDATE entries SET data = ?, cache_key = ? WHERE id = ?",
-            (pack(complete_pair, kind="pair"), key, pair_id.bytes),
-        )
-        connection.commit()
-
-        return complete_pair
-
-    def get_pairs(self, key: str) -> List[CompletePair]:
-        final_pairs: List[CompletePair] = []
-
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-        # Query entries directly by cache_key
-        cursor.execute("SELECT id, data FROM entries WHERE cache_key = ?", (key.encode("utf-8"),))
-
-        for row in cursor.fetchall():
-            pair_data = unpack(row[1], kind="pair")
-
-            if isinstance(pair_data, IncompletePair):
-                continue
-
-            final_pairs.append(pair_data)
-
-        pairs_with_streams: List[CompletePair] = []
-
-        for pair in final_pairs:
-            pairs_with_streams.append(
-                replace(
-                    pair,
-                    response=replace(
-                        pair.response,
-                        stream=self._stream_data_from_cache(pair.id.bytes, "response"),
-                    ),
-                    request=replace(
-                        pair.request,
-                        stream=self._stream_data_from_cache(pair.id.bytes, "request"),
-                    ),
-                )
-            )
-        return pairs_with_streams
-
-    def update_pair(
-        self,
-        id: uuid.UUID,
-        new_pair: Union[CompletePair, Callable[[CompletePair], CompletePair]],
-    ) -> Optional[CompletePair]:
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-        cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
-        result = cursor.fetchone()
-
-        if result is None:
-            return None
-
-        pair = unpack(result[0], kind="pair")
-
-        if isinstance(pair, IncompletePair):
-            return None
-
-        if isinstance(new_pair, CompletePair):
-            complete_pair = new_pair
-        else:
-            complete_pair = new_pair(pair)
-
-        if pair.id != complete_pair.id:
-            raise ValueError("Pair ID mismatch")
-
-        cursor.execute("UPDATE entries SET data = ? WHERE id = ?", (pack(complete_pair, kind="pair"), id.bytes))
-
-        if pair.cache_key != complete_pair.cache_key:
-            cursor.execute(
-                "UPDATE entries SET cache_key = ? WHERE id = ?",
-                (complete_pair.cache_key, complete_pair.id.bytes),
-            )
-
-        connection.commit()
-
-        return complete_pair
-
-    def remove(self, id: uuid.UUID) -> None:
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-        cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
-        result = cursor.fetchone()
-
-        if result is None:
-            return None
-
-        pair = unpack(result[0], kind="pair")
-        self._soft_delete_pair(pair, cursor)
-        connection.commit()
-
-    def _is_stream_complete(
-        self, kind: Literal["request", "response"], pair_id: uuid.UUID, cursor: sqlite3.Cursor
-    ) -> bool:
-        kind_id = self._STREAM_KIND[kind]
-        # Check if there's a completion marker (chunk_number = -1)
-        cursor.execute(
-            "SELECT 1 FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ? LIMIT 1",
-            (pair_id.bytes, kind_id, self._COMPLETE_CHUNK_NUMBER),
-        )
-        return cursor.fetchone() is not None
-
-    def _soft_delete_pair(self, pair: Union[CompletePair, IncompletePair], cursor: sqlite3.Cursor) -> None:
-        """
-        Mark the pair as deleted by setting the deleted_at timestamp.
-        """
-        marked_pair = self.mark_pair_as_deleted(pair)
-        cursor.execute(
-            "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
-            (pack(marked_pair, kind="pair"), marked_pair.meta.deleted_at, pair.id.bytes),
-        )
-
-    def _is_pair_expired(self, pair: Pair, cursor: sqlite3.Cursor) -> bool:
-        """
-        Check if the pair is expired.
-        """
-        ttl = pair.request.metadata["hishel_ttl"] if "hishel_ttl" in pair.request.metadata else self.default_ttl
-        created_at = pair.meta.created_at
-        if ttl is None:
-            return False
-        return created_at + ttl < time.time()
-
-    def _batch_cleanup(
-        self,
-    ) -> None:
-        """
-        Cleanup expired pairs in the database.
-        """
-        should_mark_as_deleted: List[Union[CompletePair, IncompletePair]] = []
-        should_hard_delete: List[Union[CompletePair, IncompletePair]] = []
-
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-        cursor.execute("SELECT id, data FROM entries")
-
-        for row in cursor.fetchall():
-            pair = unpack(row[1], kind="pair")
-            if pair is None:
-                continue
-            if self._is_pair_expired(pair, cursor) and not self.is_soft_deleted(pair):
-                should_mark_as_deleted.append(pair)
-
-            if (self.is_soft_deleted(pair) and self.is_safe_to_hard_delete(pair)) or self._is_corrupted(
-                pair, cursor
-            ):
-                should_hard_delete.append(pair)
-
-        for pair in should_mark_as_deleted:
-            self._soft_delete_pair(pair, cursor)
-
-        for pair in should_hard_delete:
-            self._hard_delete_pair(pair, cursor)
-
-        connection.commit()
-
-    def _is_corrupted(self, pair: IncompletePair | CompletePair, cursor: sqlite3.Cursor) -> bool:
-        # if pair was created more than 1 hour ago and still not completed
-        if pair.meta.created_at + 3600 < time.time() and isinstance(pair, IncompletePair):
-            return True
-
-        if isinstance(pair, CompletePair) and not self._is_stream_complete("request", pair.id, cursor):
-            return True
-        return False
-
-    def _hard_delete_pair(self, pair: CompletePair | IncompletePair, cursor: sqlite3.Cursor) -> None:
-        """
-        Permanently delete the pair from the database.
-        """
-        cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))
-
-        # Delete all streams (both request and response) for this entry
-        self._delete_stream(pair.id.bytes, cursor)
-
-    def _delete_stream(
-        self,
-        entry_id: bytes,
-        cursor: sqlite3.Cursor,
-        type: Literal["request", "response", "all"] = "all",
-    ) -> None:
-        """
-        Delete all streams (both request and response) associated with the given entry ID.
-        """
-        if type == "request":
-            cursor.execute(
-                "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["request"])
-            )
-        elif type == "response":
-            cursor.execute(
-                "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["response"])
-            )
-        elif type == "all":
-            cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))
-
-    def _save_stream(
-        self,
-        stream: Iterator[bytes],
-        entry_id: bytes,
-        kind: Literal["response", "request"],
-    ) -> Iterator[bytes]:
-        """
-        Wrapper around an async iterator that also saves the data to the cache in chunks.
-        """
-        kind_id = self._STREAM_KIND[kind]
-        chunk_number = 0
-        for chunk in stream:
-            connection = self._ensure_connection()
-            cursor = connection.cursor()
-            cursor.execute(
-                "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
-                (entry_id, kind_id, chunk_number, chunk),
-            )
-            connection.commit()
-            chunk_number += 1
-            yield chunk
-
-        # Mark end of stream with chunk_number = -1
-        connection = self._ensure_connection()
-        cursor = connection.cursor()
-        cursor.execute(
-            "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
-            (entry_id, kind_id, self._COMPLETE_CHUNK_NUMBER, b""),
-        )
-        connection.commit()
-
-    def _stream_data_from_cache(
-        self,
-        entry_id: bytes,
-        kind: Literal["response", "request"],
-    ) -> Iterator[bytes]:
-        """
-        Get an async iterator that yields the stream data from the cache.
-        """
-        kind_id = self._STREAM_KIND[kind]
-        chunk_number = 0
-
-        connection = self._ensure_connection()
-        while True:
-            cursor = connection.cursor()
-            cursor.execute(
-                "SELECT chunk_data FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ?",
-                (entry_id, kind_id, chunk_number),
-            )
-            result = cursor.fetchone()
-
-            if result is None:
-                break
-            chunk = result[0]
-            # chunk_number = -1 is the completion marker with empty data
-            if chunk == b"":
-                break
-            yield chunk
-            chunk_number += 1
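
For readers who want to see the storage convention above in isolation: the deleted SyncSqliteStorage persisted request and response bodies as numbered chunks in the streams table and wrote a final row with chunk_number = -1 and empty data once a stream finished. The following standalone sketch is not part of the diff; it reproduces that convention with plain sqlite3 (the foreign key to the entries table is omitted so the snippet runs on its own).

import sqlite3

# In-memory database with the same streams schema the deleted storage created.
conn = sqlite3.connect(":memory:")
conn.execute(
    """
    CREATE TABLE streams (
        entry_id BLOB NOT NULL,
        kind INTEGER NOT NULL,
        chunk_number INTEGER NOT NULL,
        chunk_data BLOB NOT NULL,
        PRIMARY KEY (entry_id, kind, chunk_number)
    )
    """
)

entry_id, kind = b"pair-1", 1  # kind 1 == "response" in _STREAM_KIND
for number, chunk in enumerate([b"hello ", b"world"]):
    conn.execute("INSERT INTO streams VALUES (?, ?, ?, ?)", (entry_id, kind, number, chunk))
# Completion marker: chunk_number = -1 with empty data, as _save_stream writes it.
conn.execute("INSERT INTO streams VALUES (?, ?, ?, ?)", (entry_id, kind, -1, b""))
conn.commit()

def replay(connection: sqlite3.Connection, entry: bytes, stream_kind: int) -> bytes:
    # Walk chunk numbers upward, the same way _stream_data_from_cache does,
    # stopping at a missing row or at the empty completion marker.
    body, number = b"", 0
    while True:
        row = connection.execute(
            "SELECT chunk_data FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ?",
            (entry, stream_kind, number),
        ).fetchone()
        if row is None or row[0] == b"":
            return body
        body += row[0]
        number += 1

assert replay(conn, entry_id, kind) == b"hello world"
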
hishel/beta/_sync_cache.py (deleted)
@@ -1,167 +0,0 @@
-from __future__ import annotations
-
-import hashlib
-import logging
-import time
-from dataclasses import replace
-from typing import Iterator, Awaitable, Callable
-
-from typing_extensions import assert_never
-
-from hishel.beta import (
-    AnyState,
-    SyncBaseStorage,
-    SyncSqliteStorage,
-    CacheMiss,
-    CacheOptions,
-    CouldNotBeStored,
-    FromCache,
-    IdleClient,
-    NeedRevalidation,
-    NeedToBeUpdated,
-    Request,
-    Response,
-    StoreAndUse,
-    create_idle_state,
-)
-from hishel.beta._core._spec import InvalidatePairs, vary_headers_match
-from hishel.beta._core.models import CompletePair
-
-logger = logging.getLogger("hishel.integrations.clients")
-
-
-class SyncCacheProxy:
-    """
-    A proxy for HTTP caching in clients.
-
-    This class is independent of any specific HTTP library and works only with internal models.
-    It delegates request execution to a user-provided callable, making it compatible with any
-    HTTP client. Caching behavior can be configured to either fully respect HTTP
-    caching rules or bypass them entirely.
-    """
-
-    def __init__(
-        self,
-        send_request: Callable[[Request], Response],
-        storage: SyncBaseStorage | None = None,
-        cache_options: CacheOptions | None = None,
-        ignore_specification: bool = False,
-    ) -> None:
-        self.send_request = send_request
-        self.storage = storage if storage is not None else SyncSqliteStorage()
-        self.cache_options = cache_options if cache_options is not None else CacheOptions()
-        self.ignore_specification = ignore_specification
-
-    def handle_request(self, request: Request) -> Response:
-        if self.ignore_specification or request.metadata.get("hishel_spec_ignore"):
-            return self._handle_request_ignoring_spec(request)
-        return self._handle_request_respecting_spec(request)
-
-    def _get_key_for_request(self, request: Request) -> str:
-        if request.metadata.get("hishel_body_key"):
-            assert isinstance(request.stream, Iterator)
-            collected = b"".join([chunk for chunk in request.stream])
-            hash_ = hashlib.sha256(collected).hexdigest()
-            return f"{str(request.url)}-{hash_}"
-        return str(request.url)
-
-    def _maybe_refresh_pair_ttl(self, pair: CompletePair) -> None:
-        if pair.request.metadata.get("hishel_refresh_ttl_on_access"):
-            self.storage.update_pair(
-                pair.id,
-                lambda complete_pair: replace(complete_pair, meta=replace(complete_pair.meta, created_at=time.time())),
-            )
-
-    def _handle_request_ignoring_spec(self, request: Request) -> Response:
-        logger.debug("Trying to get cached response ignoring specification")
-        pairs = self.storage.get_pairs(self._get_key_for_request(request))
-
-        logger.debug(f"Found {len(pairs)} cached pairs for the request")
-
-        for pair in pairs:
-            if (
-                str(pair.request.url) == str(request.url)
-                and pair.request.method == request.method
-                and vary_headers_match(
-                    request,
-                    pair,
-                )
-            ):
-                logger.debug(
-                    "Found matching cached response for the request",
-                )
-                pair.response.metadata["hishel_from_cache"] = True # type: ignore
-                self._maybe_refresh_pair_ttl(pair)
-                return pair.response
-
-        incomplete_pair = self.storage.create_pair(
-            request,
-        )
-        response = self.send_request(incomplete_pair.request)
-
-        logger.debug("Storing response in cache ignoring specification")
-        complete_pair = self.storage.add_response(
-            incomplete_pair.id, response, self._get_key_for_request(request)
-        )
-        return complete_pair.response
-
-    def _handle_request_respecting_spec(self, request: Request) -> Response:
-        state: AnyState = create_idle_state("client", self.cache_options)
-
-        while state:
-            logger.debug(f"Handling state: {state.__class__.__name__}")
-            if isinstance(state, IdleClient):
-                state = self._handle_idle_state(state, request)
-            elif isinstance(state, CacheMiss):
-                state = self._handle_cache_miss(state)
-            elif isinstance(state, StoreAndUse):
-                return self._handle_store_and_use(state, request)
-            elif isinstance(state, CouldNotBeStored):
-                return state.response
-            elif isinstance(state, NeedRevalidation):
-                state = self._handle_revalidation(state)
-            elif isinstance(state, FromCache):
-                self._maybe_refresh_pair_ttl(state.pair)
-                return state.pair.response
-            elif isinstance(state, NeedToBeUpdated):
-                state = self._handle_update(state)
-            elif isinstance(state, InvalidatePairs):
-                state = self._handle_invalidate_pairs(state)
-            else:
-                assert_never(state)
-
-        raise RuntimeError("Unreachable")
-
-    def _handle_idle_state(self, state: IdleClient, request: Request) -> AnyState:
-        stored_pairs = self.storage.get_pairs(self._get_key_for_request(request))
-        return state.next(request, stored_pairs)
-
-    def _handle_cache_miss(self, state: CacheMiss) -> AnyState:
-        incomplete_pair = self.storage.create_pair(state.request)
-        response = self.send_request(incomplete_pair.request)
-        return state.next(response, incomplete_pair.id)
-
-    def _handle_store_and_use(self, state: StoreAndUse, request: Request) -> Response:
-        complete_pair = self.storage.add_response(
-            state.pair_id, state.response, self._get_key_for_request(request)
-        )
-        return complete_pair.response
-
-    def _handle_revalidation(self, state: NeedRevalidation) -> AnyState:
-        revalidation_response = self.send_request(state.request)
-        return state.next(revalidation_response)
-
-    def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
-        for pair in state.updating_pairs:
-            self.storage.update_pair(
-                pair.id,
-                lambda complete_pair: replace(
-                    complete_pair, response=replace(pair.response, headers=pair.response.headers)
-                ),
-            )
-        return state.next()
-
-    def _handle_invalidate_pairs(self, state: InvalidatePairs) -> AnyState:
-        for pair_id in state.pair_ids:
-            self.storage.remove(pair_id)
-        return state.next()
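
The deleted _get_key_for_request above determines the cache key for the proxy: by default it is the request URL, but when a request carries the "hishel_body_key" metadata flag, the key becomes the URL plus a SHA-256 digest of the request body, so different payloads sent to the same endpoint occupy separate cache entries. A small self-contained sketch of that keying rule follows; the helper name cache_key is illustrative and not part of hishel.

import hashlib

def cache_key(url: str, body_chunks: list[bytes], use_body_key: bool) -> str:
    # Mirrors the removed logic: URL alone, or "<url>-<sha256 of body>"
    # when the body should participate in the key.
    if use_body_key:
        digest = hashlib.sha256(b"".join(body_chunks)).hexdigest()
        return f"{url}-{digest}"
    return url

print(cache_key("https://api.example.com/search", [b'{"q": "hishel"}'], use_body_key=True))
print(cache_key("https://api.example.com/search", [], use_body_key=False))
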