hishel 1.0.0.dev2__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff shows the content changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
hishel/httpx.py CHANGED
@@ -1,20 +1,5 @@
- from __future__ import annotations
-
- import ssl
- import typing as t
- from typing import AsyncIterator, Iterable, Iterator, Union, overload
-
- from hishel import Headers, Request, Response
- from hishel._async_cache import AsyncCacheProxy
- from hishel._core._base._storages._base import AsyncBaseStorage, SyncBaseStorage
- from hishel._core._spec import (
-     CacheOptions,
- )
- from hishel._core.models import AnyIterable
- from hishel._sync_cache import SyncCacheProxy
-
  try:
-     import httpx
+     import httpx  # noqa: F401
  except ImportError as e:
      raise ImportError(
          "httpx is required to use hishel.httpx module. "
@@ -22,314 +7,6 @@ except ImportError as e:
          "e.g., 'pip install hishel[httpx]'."
      ) from e

- SOCKET_OPTION = t.Union[
-     t.Tuple[int, int, int],
-     t.Tuple[int, int, t.Union[bytes, bytearray]],
-     t.Tuple[int, int, None, int],
- ]
-
- # 128 KB
- CHUNK_SIZE = 131072
-
-
- class IteratorStream(httpx.SyncByteStream, httpx.AsyncByteStream):
-     def __init__(self, iterator: Iterator[bytes] | AsyncIterator[bytes]) -> None:
-         self.iterator = iterator
-
-     def __iter__(self) -> Iterator[bytes]:
-         assert isinstance(self.iterator, (Iterator))
-         yield from self.iterator
-
-     async def __aiter__(self) -> AsyncIterator[bytes]:
-         assert isinstance(self.iterator, (AsyncIterator))
-         async for chunk in self.iterator:
-             yield chunk
-
-
- @overload
- def internal_to_httpx(
-     value: Request,
- ) -> httpx.Request: ...
- @overload
- def internal_to_httpx(
-     value: Response,
- ) -> httpx.Response: ...
- def internal_to_httpx(
-     value: Union[Request, Response],
- ) -> Union[httpx.Request, httpx.Response]:
-     """
-     Convert internal Request/Response to httpx.Request/httpx.Response.
-     """
-     if isinstance(value, Request):
-         return httpx.Request(
-             method=value.method,
-             url=value.url,
-             headers=value.headers,
-             stream=IteratorStream(value.stream),
-             extensions=value.metadata,
-         )
-     elif isinstance(value, Response):
-         return httpx.Response(
-             status_code=value.status_code,
-             headers=value.headers,
-             stream=IteratorStream(value.stream),
-             extensions=value.metadata,
-         )
-
-
- @overload
- def httpx_to_internal(
-     value: httpx.Request,
- ) -> Request: ...
- @overload
- def httpx_to_internal(
-     value: httpx.Response,
- ) -> Response: ...
- def httpx_to_internal(
-     value: Union[httpx.Request, httpx.Response],
- ) -> Union[Request, Response]:
-     """
-     Convert httpx.Request/httpx.Response to internal Request/Response.
-     """
-     stream: Union[Iterator[bytes], AsyncIterator[bytes]]
-     try:
-         stream = AnyIterable(value.content)
-     except (httpx.RequestNotRead, httpx.ResponseNotRead):
-         if isinstance(value, httpx.Response):
-             stream = (
-                 value.iter_raw(chunk_size=CHUNK_SIZE)
-                 if isinstance(value.stream, Iterable)
-                 else value.aiter_raw(chunk_size=CHUNK_SIZE)
-             )
-         else:
-             stream = value.stream  # type: ignore
-     if isinstance(value, httpx.Request):
-         return Request(
-             method=value.method,
-             url=str(value.url),
-             headers=Headers({key: value for key, value in value.headers.items()}),
-             stream=stream,
-             metadata={
-                 "hishel_refresh_ttl_on_access": value.extensions.get("hishel_refresh_ttl_on_access"),
-                 "hishel_ttl": value.extensions.get("hishel_ttl"),
-                 "hishel_spec_ignore": value.extensions.get("hishel_spec_ignore"),
-             },
-         )
-     elif isinstance(value, httpx.Response):
-         return Response(
-             status_code=value.status_code,
-             headers=Headers({key: value for key, value in value.headers.items()}),
-             stream=stream,
-             metadata={},
-         )
-
-
- class SyncCacheTransport(httpx.BaseTransport):
-     def __init__(
-         self,
-         next_transport: httpx.BaseTransport,
-         storage: SyncBaseStorage | None = None,
-         cache_options: CacheOptions | None = None,
-         ignore_specification: bool = False,
-     ) -> None:
-         self.next_transport = next_transport
-         self._cache_proxy: SyncCacheProxy = SyncCacheProxy(
-             send_request=self.sync_send_request,
-             storage=storage,
-             cache_options=cache_options,
-             ignore_specification=ignore_specification,
-         )
-         self.storage = self._cache_proxy.storage
-
-     def handle_request(
-         self,
-         request: httpx.Request,
-     ) -> httpx.Response:
-         internal_request = httpx_to_internal(request)
-         internal_response = self._cache_proxy.handle_request(internal_request)
-         response = internal_to_httpx(internal_response)
-         return response
-
-     def close(self) -> None:
-         self.next_transport.close()
-         self.storage.close()
-         super().close()
-
-     def sync_send_request(self, request: Request) -> Response:
-         httpx_request = internal_to_httpx(request)
-         httpx_response = self.next_transport.handle_request(httpx_request)
-         return httpx_to_internal(httpx_response)
-
-
- class SyncCacheClient(httpx.Client):
-     @overload
-     def __init__(
-         self,
-         *,
-         storage: SyncBaseStorage | None = None,
-         cache_options: CacheOptions | None = None,
-         **kwargs: t.Any,
-     ) -> None: ...
-     @overload
-     def __init__(
-         self,
-         *args: t.Any,
-         **kwargs: t.Any,
-     ) -> None: ...
-     def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
-         self.storage: SyncBaseStorage | None = kwargs.pop("storage", None)
-         self.cache_options: CacheOptions | None = kwargs.pop("cache_options", None)
-         super().__init__(*args, **kwargs)
-
-     def _init_transport(
-         self,
-         verify: ssl.SSLContext | str | bool = True,
-         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
-         trust_env: bool = True,
-         http1: bool = True,
-         http2: bool = False,
-         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
-         transport: httpx.BaseTransport | None = None,
-         **kwargs: t.Any,
-     ) -> httpx.BaseTransport:
-         if transport is not None:
-             return transport
-
-         return SyncCacheTransport(
-             next_transport=httpx.HTTPTransport(
-                 verify=verify,
-                 cert=cert,
-                 trust_env=trust_env,
-                 http1=http1,
-                 http2=http2,
-                 limits=limits,
-             ),
-             storage=self.storage,
-             cache_options=self.cache_options,
-             ignore_specification=False,
-         )
-
-     def _init_proxy_transport(
-         self,
-         proxy: httpx.Proxy,
-         verify: ssl.SSLContext | str | bool = True,
-         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
-         trust_env: bool = True,
-         http1: bool = True,
-         http2: bool = False,
-         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
-         **kwargs: t.Any,
-     ) -> httpx.BaseTransport:
-         return SyncCacheTransport(
-             next_transport=httpx.HTTPTransport(
-                 verify=verify,
-                 cert=cert,
-                 trust_env=trust_env,
-                 http1=http1,
-                 http2=http2,
-                 limits=limits,
-                 proxy=proxy,
-             ),
-             storage=self.storage,
-             cache_options=self.cache_options,
-             ignore_specification=False,
-         )
-
-
- class AsyncCacheTransport(httpx.AsyncBaseTransport):
-     def __init__(
-         self,
-         next_transport: httpx.AsyncBaseTransport,
-         storage: AsyncBaseStorage | None = None,
-         cache_options: CacheOptions | None = None,
-         ignore_specification: bool = False,
-     ) -> None:
-         self.next_transport = next_transport
-         self._cache_proxy: AsyncCacheProxy = AsyncCacheProxy(
-             send_request=self.async_send_request,
-             storage=storage,
-             cache_options=cache_options,
-             ignore_specification=ignore_specification,
-         )
-         self.storage = self._cache_proxy.storage
-
-     async def handle_async_request(
-         self,
-         request: httpx.Request,
-     ) -> httpx.Response:
-         internal_request = httpx_to_internal(request)
-         internal_response = await self._cache_proxy.handle_request(internal_request)
-         response = internal_to_httpx(internal_response)
-         return response
-
-     async def aclose(self) -> None:
-         await self.next_transport.aclose()
-         await self.storage.close()
-         await super().aclose()
-
-     async def async_send_request(self, request: Request) -> Response:
-         httpx_request = internal_to_httpx(request)
-         httpx_response = await self.next_transport.handle_async_request(httpx_request)
-         return httpx_to_internal(httpx_response)
-
-
- class AsyncCacheClient(httpx.AsyncClient):
-     def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
-         self.storage: AsyncBaseStorage | None = kwargs.pop("storage", None)
-         self.cache_options: CacheOptions | None = kwargs.pop("cache_options", None)
-         self.ignore_specification: bool = kwargs.pop("ignore_specification", False)
-         super().__init__(*args, **kwargs)
-
-     def _init_transport(
-         self,
-         verify: ssl.SSLContext | str | bool = True,
-         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
-         trust_env: bool = True,
-         http1: bool = True,
-         http2: bool = False,
-         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
-         transport: httpx.AsyncBaseTransport | None = None,
-         **kwargs: t.Any,
-     ) -> httpx.AsyncBaseTransport:
-         if transport is not None:
-             return transport
-
-         return AsyncCacheTransport(
-             next_transport=httpx.AsyncHTTPTransport(
-                 verify=verify,
-                 cert=cert,
-                 trust_env=trust_env,
-                 http1=http1,
-                 http2=http2,
-                 limits=limits,
-             ),
-             storage=self.storage,
-             cache_options=self.cache_options,
-             ignore_specification=False,
-         )

-     def _init_proxy_transport(
-         self,
-         proxy: httpx.Proxy,
-         verify: ssl.SSLContext | str | bool = True,
-         cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
-         trust_env: bool = True,
-         http1: bool = True,
-         http2: bool = False,
-         limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
-         **kwargs: t.Any,
-     ) -> httpx.AsyncBaseTransport:
-         return AsyncCacheTransport(
-             next_transport=httpx.AsyncHTTPTransport(
-                 verify=verify,
-                 cert=cert,
-                 trust_env=trust_env,
-                 http1=http1,
-                 http2=http2,
-                 limits=limits,
-                 proxy=proxy,
-             ),
-             storage=self.storage,
-             cache_options=self.cache_options,
-             ignore_specification=self.ignore_specification,
-         )
+ from ._async_httpx import AsyncCacheClient as AsyncCacheClient, AsyncCacheTransport as AsyncCacheTransport
+ from ._sync_httpx import SyncCacheClient as SyncCacheClient, SyncCacheTransport as SyncCacheTransport
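
The net effect of this change is that hishel/httpx.py becomes a thin compatibility shim: the conversion helpers, transports, and clients move to the private _sync_httpx and _async_httpx modules and are re-exported under their old names. For orientation, a minimal usage sketch; the import path follows the re-exports above, while the assumption that the 1.1.0 clients still behave as drop-in httpx.Client/httpx.AsyncClient subclasses is carried over from the removed 1.0.0.dev2 code and is not confirmed by this diff:

    from hishel.httpx import AsyncCacheClient, SyncCacheClient

    # Synchronous client: used like a regular httpx.Client, with the caching
    # transport wired in by hishel (assumed behavior, see note above).
    with SyncCacheClient() as client:
        first = client.get("https://example.com")   # network round trip
        second = client.get("https://example.com")  # may be served from the cache

    # Asynchronous counterpart (assumed to mirror httpx.AsyncClient usage):
    # async with AsyncCacheClient() as client:
    #     response = await client.get("https://example.com")
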
hishel/requests.py CHANGED
@@ -6,9 +6,9 @@ from typing import Any, Iterator, Mapping, Optional, overload
  from typing_extensions import assert_never

  from hishel import Headers, Request, Response as Response
- from hishel._core._base._storages._base import SyncBaseStorage
- from hishel._core._spec import CacheOptions
+ from hishel._core._storages._sync_base import SyncBaseStorage
  from hishel._core.models import extract_metadata_from_headers
+ from hishel._policies import CachePolicy
  from hishel._sync_cache import SyncCacheProxy
  from hishel._utils import snake_to_header

@@ -27,7 +27,7 @@ except ImportError:  # pragma: no cover
  CHUNK_SIZE = 131072


- class IteratorStream(RawIOBase):
+ class _IteratorStream(RawIOBase):
      def __init__(self, iterator: Iterator[bytes]):
          self.iterator = iterator
          self.leftover = b""
@@ -61,18 +61,18 @@ class IteratorStream(RawIOBase):


  @overload
- def requests_to_internal(
+ def _requests_to_internal(
      model: requests.models.PreparedRequest,
  ) -> Request: ...


  @overload
- def requests_to_internal(
+ def _requests_to_internal(
      model: requests.models.Response,
  ) -> Response: ...


- def requests_to_internal(
+ def _requests_to_internal(
      model: requests.models.PreparedRequest | requests.models.Response,
  ) -> Request | Response:
      if isinstance(model, requests.models.PreparedRequest):
@@ -108,19 +108,24 @@ def requests_to_internal(


  @overload
- def internal_to_requests(model: Request) -> requests.models.PreparedRequest: ...
+ def _internal_to_requests(model: Request) -> requests.models.PreparedRequest: ...
  @overload
- def internal_to_requests(model: Response) -> requests.models.Response: ...
- def internal_to_requests(model: Request | Response) -> requests.models.Response | requests.models.PreparedRequest:
+ def _internal_to_requests(model: Response) -> requests.models.Response: ...
+ def _internal_to_requests(
+     model: Request | Response,
+ ) -> requests.models.Response | requests.models.PreparedRequest:
      if isinstance(model, Response):
          response = requests.models.Response()

          assert isinstance(model.stream, Iterator)
-         stream = IteratorStream(model.stream)
+         stream = _IteratorStream(model.stream)

          urllib_response = HTTPResponse(
              body=stream,
-             headers={**model.headers, **{snake_to_header(k): str(v) for k, v in model.metadata.items()}},
+             headers={
+                 **model.headers,
+                 **{snake_to_header(k): str(v) for k, v in model.metadata.items()},
+             },
              status=model.status_code,
              preload_content=False,
              decode_content=False,
@@ -158,15 +163,13 @@ class CacheAdapter(HTTPAdapter):
          max_retries: int = 0,
          pool_block: bool = False,
          storage: SyncBaseStorage | None = None,
-         cache_options: CacheOptions | None = None,
-         ignore_specification: bool = False,
+         policy: CachePolicy | None = None,
      ):
          super().__init__(pool_connections, pool_maxsize, max_retries, pool_block)
          self._cache_proxy = SyncCacheProxy(
-             send_request=self.send_request,
+             request_sender=self._send_request,
              storage=storage,
-             cache_options=cache_options,
-             ignore_specification=ignore_specification,
+             policy=policy,
          )
          self.storage = self._cache_proxy.storage

@@ -179,9 +182,9 @@ class CacheAdapter(HTTPAdapter):
          cert: None | bytes | str | tuple[bytes | str, bytes | str] = None,
          proxies: Mapping[str, str] | None = None,
      ) -> requests.models.Response:
-         internal_request = requests_to_internal(request)
+         internal_request = _requests_to_internal(request)
          internal_response = self._cache_proxy.handle_request(internal_request)
-         response = internal_to_requests(internal_response)
+         response = _internal_to_requests(internal_response)

          # Set the original request on the response
          response.request = request
@@ -189,10 +192,13 @@ class CacheAdapter(HTTPAdapter):

          return response

-     def send_request(self, request: Request) -> Response:
-         requests_request = internal_to_requests(request)
-         response = super().send(requests_request, stream=True)
-         return requests_to_internal(response)
+     def _send_request(self, request: Request) -> Response:
+         requests_request = _internal_to_requests(request)
+         response = super().send(
+             requests_request,
+             stream=True,
+         )
+         return _requests_to_internal(response)

      def close(self) -> Any:
          self.storage.close()
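
Taken together, the requests integration in 1.1.0 replaces the cache_options/ignore_specification pair with a single policy argument, renames the proxy hook to request_sender/_send_request, and makes the conversion helpers private. A minimal mounting sketch, assuming CacheAdapter keeps HTTPAdapter's default pooling arguments and that leaving storage and policy as None falls back to hishel's defaults (both assumptions based only on the signatures visible in this diff):

    import requests

    from hishel.requests import CacheAdapter

    session = requests.Session()
    # Route all HTTPS traffic through the caching adapter; storage and policy
    # are left unset here, relying on the defaults shown in the signature above.
    session.mount("https://", CacheAdapter())

    first = session.get("https://example.com")   # network round trip
    second = session.get("https://example.com")  # may be answered from the cache
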