hishel 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
@@ -0,0 +1,167 @@
+ from __future__ import annotations
+
+ import hashlib
+ import logging
+ import time
+ from dataclasses import replace
+ from typing import Iterator, Awaitable, Callable
+
+ from typing_extensions import assert_never
+
+ from hishel.beta import (
+     AnyState,
+     SyncBaseStorage,
+     SyncSqliteStorage,
+     CacheMiss,
+     CacheOptions,
+     CouldNotBeStored,
+     FromCache,
+     IdleClient,
+     NeedRevalidation,
+     NeedToBeUpdated,
+     Request,
+     Response,
+     StoreAndUse,
+     create_idle_state,
+ )
+ from hishel.beta._core._spec import InvalidatePairs, vary_headers_match
+ from hishel.beta._core.models import CompletePair
+
+ logger = logging.getLogger("hishel.integrations.clients")
+
+
+ class SyncCacheProxy:
+     """
+     A proxy for HTTP caching in clients.
+
+     This class is independent of any specific HTTP library and works only with internal models.
+     It delegates request execution to a user-provided callable, making it compatible with any
+     HTTP client. Caching behavior can be configured to either fully respect HTTP
+     caching rules or bypass them entirely.
+     """
+
+     def __init__(
+         self,
+         send_request: Callable[[Request], Response],
+         storage: SyncBaseStorage | None = None,
+         cache_options: CacheOptions | None = None,
+         ignore_specification: bool = False,
+     ) -> None:
+         self.send_request = send_request
+         self.storage = storage if storage is not None else SyncSqliteStorage()
+         self.cache_options = cache_options if cache_options is not None else CacheOptions()
+         self.ignore_specification = ignore_specification
+
+     def handle_request(self, request: Request) -> Response:
+         if self.ignore_specification or request.metadata.get("hishel_spec_ignore"):
+             return self._handle_request_ignoring_spec(request)
+         return self._handle_request_respecting_spec(request)
+
+     def _get_key_for_request(self, request: Request) -> str:
+         if request.metadata.get("hishel_body_key"):
+             assert isinstance(request.stream, Iterator)
+             collected = b"".join([chunk for chunk in request.stream])
+             hash_ = hashlib.sha256(collected).hexdigest()
+             return f"{str(request.url)}-{hash_}"
+         return str(request.url)
+
+     def _maybe_refresh_pair_ttl(self, pair: CompletePair) -> None:
+         if pair.request.metadata.get("hishel_refresh_ttl_on_access"):
+             self.storage.update_pair(
+                 pair.id,
+                 lambda complete_pair: replace(complete_pair, meta=replace(complete_pair.meta, created_at=time.time())),
+             )
+
+     def _handle_request_ignoring_spec(self, request: Request) -> Response:
+         logger.debug("Trying to get cached response ignoring specification")
+         pairs = self.storage.get_pairs(self._get_key_for_request(request))
+
+         logger.debug(f"Found {len(pairs)} cached pairs for the request")
+
+         for pair in pairs:
+             if (
+                 str(pair.request.url) == str(request.url)
+                 and pair.request.method == request.method
+                 and vary_headers_match(
+                     request,
+                     pair,
+                 )
+             ):
+                 logger.debug(
+                     "Found matching cached response for the request",
+                 )
+                 pair.response.metadata["hishel_from_cache"] = True  # type: ignore
+                 self._maybe_refresh_pair_ttl(pair)
+                 return pair.response
+
+         incomplete_pair = self.storage.create_pair(
+             request,
+         )
+         response = self.send_request(incomplete_pair.request)
+
+         logger.debug("Storing response in cache ignoring specification")
+         complete_pair = self.storage.add_response(
+             incomplete_pair.id, response, self._get_key_for_request(request)
+         )
+         return complete_pair.response
+
+     def _handle_request_respecting_spec(self, request: Request) -> Response:
+         state: AnyState = create_idle_state("client", self.cache_options)
+
+         while state:
+             logger.debug(f"Handling state: {state.__class__.__name__}")
+             if isinstance(state, IdleClient):
+                 state = self._handle_idle_state(state, request)
+             elif isinstance(state, CacheMiss):
+                 state = self._handle_cache_miss(state)
+             elif isinstance(state, StoreAndUse):
+                 return self._handle_store_and_use(state, request)
+             elif isinstance(state, CouldNotBeStored):
+                 return state.response
+             elif isinstance(state, NeedRevalidation):
+                 state = self._handle_revalidation(state)
+             elif isinstance(state, FromCache):
+                 self._maybe_refresh_pair_ttl(state.pair)
+                 return state.pair.response
+             elif isinstance(state, NeedToBeUpdated):
+                 state = self._handle_update(state)
+             elif isinstance(state, InvalidatePairs):
+                 state = self._handle_invalidate_pairs(state)
+             else:
+                 assert_never(state)
+
+         raise RuntimeError("Unreachable")
+
+     def _handle_idle_state(self, state: IdleClient, request: Request) -> AnyState:
+         stored_pairs = self.storage.get_pairs(self._get_key_for_request(request))
+         return state.next(request, stored_pairs)
+
+     def _handle_cache_miss(self, state: CacheMiss) -> AnyState:
+         incomplete_pair = self.storage.create_pair(state.request)
+         response = self.send_request(incomplete_pair.request)
+         return state.next(response, incomplete_pair.id)
+
+     def _handle_store_and_use(self, state: StoreAndUse, request: Request) -> Response:
+         complete_pair = self.storage.add_response(
+             state.pair_id, state.response, self._get_key_for_request(request)
+         )
+         return complete_pair.response
+
+     def _handle_revalidation(self, state: NeedRevalidation) -> AnyState:
+         revalidation_response = self.send_request(state.request)
+         return state.next(revalidation_response)
+
+     def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
+         for pair in state.updating_pairs:
+             self.storage.update_pair(
+                 pair.id,
+                 lambda complete_pair: replace(
+                     complete_pair, response=replace(pair.response, headers=pair.response.headers)
+                 ),
+             )
+         return state.next()
+
+     def _handle_invalidate_pairs(self, state: InvalidatePairs) -> AnyState:
+         for pair_id in state.pair_ids:
+             self.storage.remove(pair_id)
+         return state.next()
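The proxy added above is deliberately HTTP-library agnostic: it only consumes and produces the internal Request/Response models and calls back into whichever send_request callable it is given. A minimal usage sketch follows; the sending helper is hypothetical, and the import path for SyncCacheProxy is assumed from the hishel.beta._sync_cache import used by the httpx integration later in this diff.

    from hishel.beta import Headers, Request, Response
    from hishel.beta._sync_cache import SyncCacheProxy  # path assumed, see note above

    def send_with_my_client(request: Request) -> Response:
        # Hypothetical adapter: issue `request` with any HTTP client and
        # repackage the result into the internal Response model.
        raise NotImplementedError

    # Storage defaults to SyncSqliteStorage() and options to CacheOptions().
    proxy = SyncCacheProxy(send_request=send_with_my_client)
    response = proxy.handle_request(
        Request(
            method="GET",
            url="https://example.com/",
            headers=Headers({}),
            stream=iter([b""]),
            metadata={},
        )
    )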
hishel/beta/httpx.py ADDED
@@ -0,0 +1,328 @@
+ from __future__ import annotations
+
+ import ssl
+ import typing as t
+ from typing import AsyncIterator, Iterable, Iterator, Union, overload
+
+ import httpx
+
+ from hishel.beta import Headers, Request, Response
+ from hishel.beta._async_cache import AsyncCacheProxy
+ from hishel.beta._core._base._storages._base import AsyncBaseStorage, SyncBaseStorage
+ from hishel.beta._core._spec import (
+     CacheOptions,
+ )
+ from hishel.beta._core.models import AnyIterable
+ from hishel.beta._sync_cache import SyncCacheProxy
+
+ SOCKET_OPTION = t.Union[
+     t.Tuple[int, int, int],
+     t.Tuple[int, int, t.Union[bytes, bytearray]],
+     t.Tuple[int, int, None, int],
+ ]
+
+ # 128 KB
+ CHUNK_SIZE = 131072
+
+
+ class IteratorStream(httpx.SyncByteStream, httpx.AsyncByteStream):
+     def __init__(self, iterator: Iterator[bytes] | AsyncIterator[bytes]) -> None:
+         self.iterator = iterator
+
+     def __iter__(self) -> Iterator[bytes]:
+         assert isinstance(self.iterator, (Iterator))
+         yield from self.iterator
+
+     async def __aiter__(self) -> AsyncIterator[bytes]:
+         assert isinstance(self.iterator, (AsyncIterator))
+         async for chunk in self.iterator:
+             yield chunk
+
+
+ @overload
+ def internal_to_httpx(
+     value: Request,
+ ) -> httpx.Request: ...
+ @overload
+ def internal_to_httpx(
+     value: Response,
+ ) -> httpx.Response: ...
+ def internal_to_httpx(
+     value: Union[Request, Response],
+ ) -> Union[httpx.Request, httpx.Response]:
+     """
+     Convert internal Request/Response to httpx.Request/httpx.Response.
+     """
+     if isinstance(value, Request):
+         return httpx.Request(
+             method=value.method,
+             url=value.url,
+             headers=value.headers,
+             stream=IteratorStream(value.stream),
+             extensions=value.metadata,
+         )
+     elif isinstance(value, Response):
+         return httpx.Response(
+             status_code=value.status_code,
+             headers=value.headers,
+             stream=IteratorStream(value.stream),
+             extensions=value.metadata,
+         )
+
+
+ @overload
+ def httpx_to_internal(
+     value: httpx.Request,
+ ) -> Request: ...
+ @overload
+ def httpx_to_internal(
+     value: httpx.Response,
+ ) -> Response: ...
+ def httpx_to_internal(
+     value: Union[httpx.Request, httpx.Response],
+ ) -> Union[Request, Response]:
+     """
+     Convert httpx.Request/httpx.Response to internal Request/Response.
+     """
+     stream: Union[Iterator[bytes], AsyncIterator[bytes]]
+     try:
+         stream = AnyIterable(value.content)
+     except (httpx.RequestNotRead, httpx.ResponseNotRead):
+         if isinstance(value, httpx.Response):
+             stream = (
+                 value.iter_raw(chunk_size=CHUNK_SIZE)
+                 if isinstance(value.stream, Iterable)
+                 else value.aiter_raw(chunk_size=CHUNK_SIZE)
+             )
+         else:
+             stream = value.stream  # type: ignore
+     if isinstance(value, httpx.Request):
+         return Request(
+             method=value.method,
+             url=str(value.url),
+             headers=Headers({key: value for key, value in value.headers.items()}),
+             stream=stream,
+             metadata={
+                 "hishel_refresh_ttl_on_access": value.extensions.get("hishel_refresh_ttl_on_access"),
+                 "hishel_ttl": value.extensions.get("hishel_ttl"),
+                 "hishel_spec_ignore": value.extensions.get("hishel_spec_ignore"),
+             },
+         )
+     elif isinstance(value, httpx.Response):
+         return Response(
+             status_code=value.status_code,
+             headers=Headers({key: value for key, value in value.headers.items()}),
+             stream=stream,
+             metadata={},
+         )
+
+
+ class SyncCacheTransport(httpx.BaseTransport):
+     def __init__(
+         self,
+         next_transport: httpx.BaseTransport,
+         storage: SyncBaseStorage | None = None,
+         cache_options: CacheOptions | None = None,
+         ignore_specification: bool = False,
+     ) -> None:
+         self.next_transport = next_transport
+         self._cache_proxy: SyncCacheProxy = SyncCacheProxy(
+             send_request=self.sync_send_request,
+             storage=storage,
+             cache_options=cache_options,
+             ignore_specification=ignore_specification,
+         )
+         self.storage = self._cache_proxy.storage
+
+     def handle_request(
+         self,
+         request: httpx.Request,
+     ) -> httpx.Response:
+         internal_request = httpx_to_internal(request)
+         internal_response = self._cache_proxy.handle_request(internal_request)
+         response = internal_to_httpx(internal_response)
+         return response
+
+     def close(self) -> None:
+         self.next_transport.close()
+         self.storage.close()
+         super().close()
+
+     def sync_send_request(self, request: Request) -> Response:
+         httpx_request = internal_to_httpx(request)
+         httpx_response = self.next_transport.handle_request(httpx_request)
+         return httpx_to_internal(httpx_response)
+
+
+ class SyncCacheClient(httpx.Client):
+     @overload
+     def __init__(
+         self,
+         *,
+         storage: SyncBaseStorage | None = None,
+         cache_options: CacheOptions | None = None,
+         **kwargs: t.Any,
+     ) -> None: ...
+     @overload
+     def __init__(
+         self,
+         *args: t.Any,
+         **kwargs: t.Any,
+     ) -> None: ...
+     def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+         self.storage: SyncBaseStorage | None = kwargs.pop("storage", None)
+         self.cache_options: CacheOptions | None = kwargs.pop("cache_options", None)
+         super().__init__(*args, **kwargs)
+
+     def _init_transport(
+         self,
+         verify: ssl.SSLContext | str | bool = True,
+         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
+         trust_env: bool = True,
+         http1: bool = True,
+         http2: bool = False,
+         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
+         transport: httpx.BaseTransport | None = None,
+         **kwargs: t.Any,
+     ) -> httpx.BaseTransport:
+         if transport is not None:
+             return transport
+
+         return SyncCacheTransport(
+             next_transport=httpx.HTTPTransport(
+                 verify=verify,
+                 cert=cert,
+                 trust_env=trust_env,
+                 http1=http1,
+                 http2=http2,
+                 limits=limits,
+             ),
+             storage=self.storage,
+             cache_options=self.cache_options,
+             ignore_specification=False,
+         )
+
+     def _init_proxy_transport(
+         self,
+         proxy: httpx.Proxy,
+         verify: ssl.SSLContext | str | bool = True,
+         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
+         trust_env: bool = True,
+         http1: bool = True,
+         http2: bool = False,
+         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
+         **kwargs: t.Any,
+     ) -> httpx.BaseTransport:
+         return SyncCacheTransport(
+             next_transport=httpx.HTTPTransport(
+                 verify=verify,
+                 cert=cert,
+                 trust_env=trust_env,
+                 http1=http1,
+                 http2=http2,
+                 limits=limits,
+                 proxy=proxy,
+             ),
+             storage=self.storage,
+             cache_options=self.cache_options,
+             ignore_specification=False,
+         )
+
+
+ class AsyncCacheTransport(httpx.AsyncBaseTransport):
+     def __init__(
+         self,
+         next_transport: httpx.AsyncBaseTransport,
+         storage: AsyncBaseStorage | None = None,
+         cache_options: CacheOptions | None = None,
+         ignore_specification: bool = False,
+     ) -> None:
+         self.next_transport = next_transport
+         self._cache_proxy: AsyncCacheProxy = AsyncCacheProxy(
+             send_request=self.async_send_request,
+             storage=storage,
+             cache_options=cache_options,
+             ignore_specification=ignore_specification,
+         )
+         self.storage = self._cache_proxy.storage
+
+     async def handle_async_request(
+         self,
+         request: httpx.Request,
+     ) -> httpx.Response:
+         internal_request = httpx_to_internal(request)
+         internal_response = await self._cache_proxy.handle_request(internal_request)
+         response = internal_to_httpx(internal_response)
+         return response
+
+     async def aclose(self) -> None:
+         await self.next_transport.aclose()
+         await self.storage.close()
+         await super().aclose()
+
+     async def async_send_request(self, request: Request) -> Response:
+         httpx_request = internal_to_httpx(request)
+         httpx_response = await self.next_transport.handle_async_request(httpx_request)
+         return httpx_to_internal(httpx_response)
+
+
+ class AsyncCacheClient(httpx.AsyncClient):
+     def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+         self.storage: AsyncBaseStorage | None = kwargs.pop("storage", None)
+         self.cache_options: CacheOptions | None = kwargs.pop("cache_options", None)
+         self.ignore_specification: bool = kwargs.pop("ignore_specification", False)
+         super().__init__(*args, **kwargs)
+
+     def _init_transport(
+         self,
+         verify: ssl.SSLContext | str | bool = True,
+         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
+         trust_env: bool = True,
+         http1: bool = True,
+         http2: bool = False,
+         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
+         transport: httpx.AsyncBaseTransport | None = None,
+         **kwargs: t.Any,
+     ) -> httpx.AsyncBaseTransport:
+         if transport is not None:
+             return transport
+
+         return AsyncCacheTransport(
+             next_transport=httpx.AsyncHTTPTransport(
+                 verify=verify,
+                 cert=cert,
+                 trust_env=trust_env,
+                 http1=http1,
+                 http2=http2,
+                 limits=limits,
+             ),
+             storage=self.storage,
+             cache_options=self.cache_options,
+             ignore_specification=False,
+         )
+
+     def _init_proxy_transport(
+         self,
+         proxy: httpx.Proxy,
+         verify: ssl.SSLContext | str | bool = True,
+         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
+         trust_env: bool = True,
+         http1: bool = True,
+         http2: bool = False,
+         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
+         **kwargs: t.Any,
+     ) -> httpx.AsyncBaseTransport:
+         return AsyncCacheTransport(
+             next_transport=httpx.AsyncHTTPTransport(
+                 verify=verify,
+                 cert=cert,
+                 trust_env=trust_env,
+                 http1=http1,
+                 http2=http2,
+                 limits=limits,
+                 proxy=proxy,
+             ),
+             storage=self.storage,
+             cache_options=self.cache_options,
+             ignore_specification=self.ignore_specification,
+         )
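Taken together, the transports and clients in hishel/beta/httpx.py let httpx users opt into caching either by constructing a cache-aware client or by mounting the transport on a stock client. A rough sketch, assuming the wheel exposes these names from hishel.beta.httpx as the file path suggests (the URL and the per-request extension usage are illustrative only):

    import httpx

    from hishel.beta.httpx import SyncCacheClient, SyncCacheTransport

    # Option 1: cache-aware client; the transport and SyncSqliteStorage are built internally.
    with SyncCacheClient() as client:
        client.get("https://example.com/")

    # Option 2: wrap an explicit transport chain around a plain httpx.Client.
    transport = SyncCacheTransport(next_transport=httpx.HTTPTransport())
    with httpx.Client(transport=transport) as client:
        # Request extensions are copied into internal metadata by httpx_to_internal,
        # so hishel_spec_ignore makes this call bypass the specification-respecting path.
        client.get("https://example.com/", extensions={"hishel_spec_ignore": True})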
@@ -0,0 +1,198 @@
+ from __future__ import annotations
+
+ from io import RawIOBase
+ from typing import Any, Iterator, Mapping, Optional, overload
+
+ from typing_extensions import assert_never
+
+ from hishel._utils import snake_to_header
+ from hishel.beta import Headers, Request, Response as Response
+ from hishel.beta._core._base._storages._base import SyncBaseStorage
+ from hishel.beta._core._spec import CacheOptions
+ from hishel.beta._core.models import extract_metadata_from_headers
+ from hishel.beta._sync_cache import SyncCacheProxy
+
+ try:
+     import requests
+     from requests.adapters import HTTPAdapter
+     from urllib3 import HTTPResponse
+     from urllib3.util.retry import Retry as Retry
+ except ImportError:  # pragma: no cover
+     raise ImportError(
+         "The 'requests' library is required to use the requests integration. "
+         "Install hishel with 'pip install hishel[requests]'."
+     )
+
+ # 128 KB
+ CHUNK_SIZE = 131072
+
+
+ class IteratorStream(RawIOBase):
+     def __init__(self, iterator: Iterator[bytes]):
+         self.iterator = iterator
+         self.leftover = b""
+
+     def readable(self) -> bool:
+         return True
+
+     def readinto(self, b: bytearray) -> Optional[int]:  # type: ignore
+         chunk = self.read(len(b))
+         if not chunk:
+             return 0
+         n = len(chunk)
+         b[:n] = chunk
+         return n
+
+     def read(self, size: int = -1) -> bytes:
+         if size is None or size < 0:
+             result = self.leftover + b"".join(self.iterator)
+             self.leftover = b""
+             return result
+
+         while len(self.leftover) < size:
+             try:
+                 self.leftover += next(self.iterator)
+             except StopIteration:
+                 break
+
+         result = self.leftover[:size]
+         self.leftover = self.leftover[size:]
+         return result
+
+
+ @overload
+ def requests_to_internal(
+     model: requests.models.PreparedRequest,
+ ) -> Request: ...
+
+
+ @overload
+ def requests_to_internal(
+     model: requests.models.Response,
+ ) -> Response: ...
+
+
+ def requests_to_internal(
+     model: requests.models.PreparedRequest | requests.models.Response,
+ ) -> Request | Response:
+     if isinstance(model, requests.models.PreparedRequest):
+         body: bytes
+         if isinstance(model.body, str):
+             body = model.body.encode("utf-8")
+         elif isinstance(model.body, bytes):
+             body = model.body
+         else:
+             body = b""
+         assert model.method
+         return Request(
+             method=model.method,
+             url=str(model.url),
+             headers=Headers(model.headers),
+             stream=iter([body]),
+             metadata=extract_metadata_from_headers(model.headers),
+         )
+     elif isinstance(model, requests.models.Response):
+         try:
+             stream = model.raw.stream(amt=CHUNK_SIZE, decode_content=None)
+         except requests.exceptions.StreamConsumedError:
+             stream = iter([model.content])
+
+         return Response(
+             status_code=model.status_code,
+             headers=Headers(model.headers),
+             stream=stream,
+         )
+     else:
+         assert_never(model)
+         raise RuntimeError("This line should never be reached, but is here to satisfy type checkers.")
+
+
+ @overload
+ def internal_to_requests(model: Request) -> requests.models.PreparedRequest: ...
+ @overload
+ def internal_to_requests(model: Response) -> requests.models.Response: ...
+ def internal_to_requests(model: Request | Response) -> requests.models.Response | requests.models.PreparedRequest:
+     if isinstance(model, Response):
+         response = requests.models.Response()
+
+         assert isinstance(model.stream, Iterator)
+         stream = IteratorStream(model.stream)
+
+         urllib_response = HTTPResponse(
+             body=stream,
+             headers={**model.headers, **{snake_to_header(k): str(v) for k, v in model.metadata.items()}},
+             status=model.status_code,
+             preload_content=False,
+             decode_content=False,
+         )
+
+         # Set up the response object
+         response.raw = urllib_response
+         response.status_code = model.status_code
+         response.headers.update(model.headers)
+         response.headers.update({snake_to_header(k): str(v) for k, v in model.metadata.items()})
+         response.url = ""  # Will be set by requests
+
+         return response
+     else:
+         assert isinstance(model.stream, Iterator)
+         request = requests.Request(
+             method=model.method,
+             url=model.url,
+             headers=model.headers,
+             data=b"".join(model.stream) if model.stream else None,
+         )
+         return request.prepare()
+
+
+ class CacheAdapter(HTTPAdapter):
+     """
+     A custom HTTPAdapter that adds HTTP caching to requests by routing each
+     request through a SyncCacheProxy before falling back to the network.
+     """
+
+     def __init__(
+         self,
+         pool_connections: int = 10,
+         pool_maxsize: int = 10,
+         max_retries: int = 0,
+         pool_block: bool = False,
+         storage: SyncBaseStorage | None = None,
+         cache_options: CacheOptions | None = None,
+         ignore_specification: bool = False,
+     ):
+         super().__init__(pool_connections, pool_maxsize, max_retries, pool_block)
+         self._cache_proxy = SyncCacheProxy(
+             send_request=self.send_request,
+             storage=storage,
+             cache_options=cache_options,
+             ignore_specification=ignore_specification,
+         )
+         self.storage = self._cache_proxy.storage
+
+     def send(
+         self,
+         request: requests.models.PreparedRequest,
+         stream: bool = False,
+         timeout: None | float | tuple[float, float] | tuple[float, None] = None,
+         verify: bool | str = True,
+         cert: None | bytes | str | tuple[bytes | str, bytes | str] = None,
+         proxies: Mapping[str, str] | None = None,
+     ) -> requests.models.Response:
+         internal_request = requests_to_internal(request)
+         internal_response = self._cache_proxy.handle_request(internal_request)
+         response = internal_to_requests(internal_response)
+
+         # Set the original request on the response
+         response.request = request
+         response.connection = self  # type: ignore
+
+         return response
+
+     def send_request(self, request: Request) -> Response:
+         requests_request = internal_to_requests(request)
+         response = super().send(requests_request, stream=True)
+         return requests_to_internal(response)
+
+     def close(self) -> Any:
+         self.storage.close()
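Because CacheAdapter subclasses requests' HTTPAdapter, it can be mounted on a Session like any other adapter; cache hits are then served from storage before super().send() is reached. A brief sketch follows, under the assumption that the wheel exposes the class from a requests integration module (the file's path is not shown in this diff, so the import below is a placeholder):

    import requests

    from hishel.beta.requests import CacheAdapter  # placeholder import path, see note above

    session = requests.Session()
    adapter = CacheAdapter()  # storage defaults to SyncSqliteStorage via SyncCacheProxy
    session.mount("https://", adapter)
    session.mount("http://", adapter)

    # The first call goes to the network and is stored; a repeat call that
    # satisfies the caching rules is answered from storage.
    session.get("https://example.com/")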