pyqwest 0.3.0__cp310-cp310-musllinux_1_2_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+__all__ = ["AsyncPyqwestTransport", "PyqwestTransport"]
+
+from ._transport import AsyncPyqwestTransport, PyqwestTransport
@@ -0,0 +1,281 @@
+from __future__ import annotations
+
+import asyncio
+import contextlib
+from typing import TYPE_CHECKING, cast
+
+import httpx
+from h2.errors import ErrorCodes
+from h2.events import StreamReset
+
+from pyqwest import (
+    Headers,
+    HTTPTransport,
+    Request,
+    Response,
+    StreamError,
+    StreamErrorCode,
+    SyncHTTPTransport,
+    SyncRequest,
+    SyncResponse,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncIterator, Iterator
+
+
+class AsyncPyqwestTransport(httpx.AsyncBaseTransport):
+    """An HTTPX transport implementation that delegates to pyqwest.
+
+    This can be used with any existing code using httpx.AsyncClient, and will enable
+    use of bidirectional streaming and response trailers.
+    """
+
+    _transport: HTTPTransport
+
+    def __init__(self, transport: HTTPTransport) -> None:
+        """Creates a new AsyncPyqwestTransport.
+
+        Args:
+            transport: The pyqwest HTTPTransport to delegate requests to.
+        """
+        self._transport = transport
+
+    async def handle_async_request(
+        self, httpx_request: httpx.Request
+    ) -> httpx.Response:
+        request_headers = convert_headers(httpx_request.headers)
+        request_content = async_request_content(httpx_request.stream)
+        timeout = convert_timeout(httpx_request.extensions)
+
+        try:
+            response = await self._transport.execute(
+                Request(
+                    httpx_request.method,
+                    str(httpx_request.url),
+                    headers=request_headers,
+                    content=request_content,
+                    timeout=timeout,  # pyright: ignore[reportCallIssue]
+                )
+            )
+        except StreamError as e:
+            raise map_stream_error(e) from e
+
+        def get_trailers() -> httpx.Headers:
+            return httpx.Headers(tuple(response.trailers.items()))
+
+        return httpx.Response(
+            status_code=response.status,
+            headers=httpx.Headers(tuple(response.headers.items())),
+            stream=AsyncIteratorByteStream(response),
+            extensions={"get_trailers": get_trailers},
+        )
+
+
+def async_request_content(
+    stream: httpx.AsyncByteStream | httpx.SyncByteStream | httpx.ByteStream,
+) -> bytes | AsyncIterator[bytes]:
+    match stream:
+        case httpx.ByteStream():
+            # Buffered bytes
+            return next(iter(stream))
+        case _:
+            return async_request_content_iter(stream)
+
+
+async def async_request_content_iter(
+    stream: httpx.AsyncByteStream | httpx.SyncByteStream,
+) -> AsyncIterator[bytes]:
+    match stream:
+        case httpx.AsyncByteStream():
+            async with contextlib.aclosing(stream):
+                async for chunk in stream:
+                    yield chunk
+        case httpx.SyncByteStream():
+            with contextlib.closing(stream):
+                stream_iter = iter(stream)
+                while True:
+                    chunk = await asyncio.to_thread(next, stream_iter, None)
+                    if chunk is None:
+                        break
+                    yield chunk
+
+
+class AsyncIteratorByteStream(httpx.AsyncByteStream):
+    def __init__(self, response: Response) -> None:
+        self._response = response
+        self._is_stream_consumed = False
+
+    async def __aiter__(self) -> AsyncIterator[bytes]:
+        if self._is_stream_consumed:
+            raise httpx.StreamConsumed
+        self._is_stream_consumed = True
+        try:
+            async for chunk in self._response.content:
+                yield bytes(chunk)
+        except StreamError as e:
+            raise map_stream_error(e) from e
+
+    async def aclose(self) -> None:
+        await self._response.aclose()
+
+
+class PyqwestTransport(httpx.BaseTransport):
+    """An HTTPX transport implementation that delegates to pyqwest.
+
+    This can be used with any existing code using httpx.Client, and will enable
+    use of bidirectional streaming and response trailers.
+    """
+
+    _transport: SyncHTTPTransport
+
+    def __init__(self, transport: SyncHTTPTransport) -> None:
+        """Creates a new PyqwestTransport.
+
+        Args:
+            transport: The pyqwest SyncHTTPTransport to delegate requests to.
+        """
+        self._transport = transport
+
+    def handle_request(self, httpx_request: httpx.Request) -> httpx.Response:
+        request_headers = convert_headers(httpx_request.headers)
+        request_content = sync_request_content(httpx_request.stream)
+        timeout = convert_timeout(httpx_request.extensions)
+
+        try:
+            response = self._transport.execute_sync(
+                SyncRequest(
+                    httpx_request.method,
+                    str(httpx_request.url),
+                    headers=request_headers,
+                    content=request_content,
+                    timeout=timeout,  # pyright: ignore[reportCallIssue]
+                )
+            )
+        except StreamError as e:
+            raise map_stream_error(e) from e
+
+        def get_trailers() -> httpx.Headers:
+            return httpx.Headers(tuple(response.trailers.items()))
+
+        return httpx.Response(
+            status_code=response.status,
+            headers=httpx.Headers(tuple(response.headers.items())),
+            stream=IteratorByteStream(response),
+            extensions={"get_trailers": get_trailers},
+        )
+
+
+def sync_request_content(
+    stream: httpx.AsyncByteStream | httpx.SyncByteStream | httpx.ByteStream,
+) -> bytes | Iterator[bytes]:
+    match stream:
+        case httpx.ByteStream():
+            # Buffered bytes
+            return next(iter(stream))
+        case _:
+            return sync_request_content_iter(stream)
+
+
+def sync_request_content_iter(
+    stream: httpx.AsyncByteStream | httpx.SyncByteStream,
+) -> Iterator[bytes]:
+    match stream:
+        case httpx.AsyncByteStream():
+            msg = "unreachable"
+            raise TypeError(msg)
+        case httpx.SyncByteStream():
+            with contextlib.closing(stream):
+                yield from stream
+
+
+class IteratorByteStream(httpx.SyncByteStream):
+    def __init__(self, response: SyncResponse) -> None:
+        self._response = response
+        self._is_stream_consumed = False
+
+    def __iter__(self) -> Iterator[bytes]:
+        if self._is_stream_consumed:
+            raise httpx.StreamConsumed
+        self._is_stream_consumed = True
+        try:
+            for chunk in self._response.content:
+                yield bytes(chunk)
+        except StreamError as e:
+            raise map_stream_error(e) from e
+
+    def close(self) -> None:
+        self._response.close()
+
+
+# Headers that are managed by the transport and should not be forwarded.
+TRANSPORT_HEADERS = {
+    "connection",
+    "keep-alive",
+    "proxy-connection",
+    "transfer-encoding",
+    "upgrade",
+}
+
+
+def convert_headers(headers: httpx.Headers) -> Headers:
+    return Headers(
+        (k, v) for k, v in headers.multi_items() if k.lower() not in TRANSPORT_HEADERS
+    )
+
+
+def convert_timeout(extensions: dict) -> float | None:
+    httpx_timeout = cast("dict | None", extensions.get("timeout"))
+    if httpx_timeout is None:
+        return None
+    # reqwest does not support setting individual timeout settings
+    # per call, only an operation timeout, so we need to approximate
+    # that from the httpx timeout dict. Connect usually happens once
+    # and can be given a longer timeout - we assume the operation timeout
+    # is the max of read/write if present, or connect if not. We ignore
+    # pool for now.
+    read_timeout = httpx_timeout.get("read", -1)
+    if read_timeout is None:
+        read_timeout = -1
+    write_timeout = httpx_timeout.get("write", -1)
+    if write_timeout is None:
+        write_timeout = -1
+    operation_timeout = max(read_timeout, write_timeout)
+    if operation_timeout != -1:
+        return operation_timeout
+    return httpx_timeout.get("connect")
+
+
+def map_stream_error(e: StreamError) -> httpx.RemoteProtocolError:
+    match e.code:
+        case StreamErrorCode.NO_ERROR:
+            code = ErrorCodes.NO_ERROR
+        case StreamErrorCode.PROTOCOL_ERROR:
+            code = ErrorCodes.PROTOCOL_ERROR
+        case StreamErrorCode.INTERNAL_ERROR:
+            code = ErrorCodes.INTERNAL_ERROR
+        case StreamErrorCode.FLOW_CONTROL_ERROR:
+            code = ErrorCodes.FLOW_CONTROL_ERROR
+        case StreamErrorCode.SETTINGS_TIMEOUT:
+            code = ErrorCodes.SETTINGS_TIMEOUT
+        case StreamErrorCode.STREAM_CLOSED:
+            code = ErrorCodes.STREAM_CLOSED
+        case StreamErrorCode.FRAME_SIZE_ERROR:
+            code = ErrorCodes.FRAME_SIZE_ERROR
+        case StreamErrorCode.REFUSED_STREAM:
+            code = ErrorCodes.REFUSED_STREAM
+        case StreamErrorCode.CANCEL:
+            code = ErrorCodes.CANCEL
+        case StreamErrorCode.COMPRESSION_ERROR:
+            code = ErrorCodes.COMPRESSION_ERROR
+        case StreamErrorCode.CONNECT_ERROR:
+            code = ErrorCodes.CONNECT_ERROR
+        case StreamErrorCode.ENHANCE_YOUR_CALM:
+            code = ErrorCodes.ENHANCE_YOUR_CALM
+        case StreamErrorCode.INADEQUATE_SECURITY:
+            code = ErrorCodes.INADEQUATE_SECURITY
+        case StreamErrorCode.HTTP_1_1_REQUIRED:
+            code = ErrorCodes.HTTP_1_1_REQUIRED
+        case _:
+            code = ErrorCodes.INTERNAL_ERROR
+    return httpx.RemoteProtocolError(str(StreamReset(stream_id=-1, error_code=code)))
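The two transports above are drop-in httpx transports. A minimal usage sketch for the synchronous side follows; the pyqwest_httpx module name and the no-argument SyncHTTPTransport() constructor are illustrative assumptions, since the diff does not show the adapter package's name or the pyqwest transport constructors. The async side is the same shape, using httpx.AsyncClient with AsyncPyqwestTransport(HTTPTransport()).

    import httpx

    from pyqwest import SyncHTTPTransport

    # Hypothetical import path; the diff does not name the module that
    # re-exports PyqwestTransport and AsyncPyqwestTransport.
    from pyqwest_httpx import PyqwestTransport

    # SyncHTTPTransport() with no arguments is an assumption; the real
    # constructor may require configuration.
    transport = PyqwestTransport(SyncHTTPTransport())
    with httpx.Client(transport=transport) as client:
        # httpx stores the per-request timeout in request.extensions["timeout"],
        # which convert_timeout() above collapses into a single operation timeout.
        response = client.get("https://example.com", timeout=5.0)
        print(response.status_code)
        # Response trailers are exposed through the "get_trailers" extension
        # installed by handle_request(); call it after the body has been read.
        trailers = response.extensions["get_trailers"]()
        print(trailers)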
pyqwest/py.typed ADDED
File without changes
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+__all__ = ["ASGITransport", "WSGITransport"]
+
+from ._asgi import ASGITransport
+from ._wsgi import WSGITransport
@@ -0,0 +1,360 @@
+from __future__ import annotations
+
+import asyncio
+import contextlib
+from collections.abc import AsyncIterator
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any
+from urllib.parse import unquote, urlparse
+
+from pyqwest import (
+    Headers,
+    HTTPVersion,
+    ReadError,
+    Request,
+    Response,
+    Transport,
+    WriteError,
+)
+
+from ._asgi_compatibility import guarantee_single_callable
+
+if TYPE_CHECKING:
+    from types import TracebackType
+
+    from asgiref.typing import (
+        ASGI3Application,
+        ASGIApplication,
+        ASGIReceiveEvent,
+        ASGISendEvent,
+        ASGIVersions,
+        HTTPScope,
+        LifespanScope,
+        LifespanShutdownEvent,
+        LifespanStartupEvent,
+    )
+
+_asgi: ASGIVersions = {"version": "3.0", "spec_version": "2.5"}
+_extensions = {"http.response.trailers": {}}
+
+
+@dataclass(frozen=True)
+class Lifespan:
+    task: asyncio.Task[None]
+    receive_queue: asyncio.Queue[LifespanStartupEvent | LifespanShutdownEvent]
+    send_queue: asyncio.Queue[ASGISendEvent | Exception]
+
+
+class ASGITransport(Transport):
+    """Transport implementation that directly invokes an ASGI application. Useful for testing.
+
+    The ASGI transport supports lifespan - to use it, make sure to use the transport as an
+    asynchronous context manager. Lifespan startup will be run on entering and shutdown when
+    exiting.
+    """
+
+    _app: ASGI3Application
+    _http_version: HTTPVersion
+    _client: tuple[str, int]
+    _state: dict[str, Any]
+    _lifespan: Lifespan | None
+
+    def __init__(
+        self,
+        app: ASGIApplication,
+        http_version: HTTPVersion = HTTPVersion.HTTP2,
+        client: tuple[str, int] = ("127.0.0.1", 111),
+    ) -> None:
+        """Creates a new ASGI transport.
+
+        Args:
+            app: The ASGI application to invoke.
+            http_version: The HTTP version to mimic for requests. Note, semantics such as lack of
+                bidirectional streaming for HTTP/1 are not enforced.
+            client: The (host, port) tuple to use for the client address in the ASGI scope.
+        """
+        self._app = guarantee_single_callable(app)
+        self._http_version = http_version
+        self._client = client
+        self._state = {}
+        self._lifespan = None
+
+    async def execute(self, request: Request) -> Response:
+        parsed_url = urlparse(request.url)
+        raw_path = parsed_url.path or "/"
+        path = unquote(raw_path)
+        match self._http_version:
+            case HTTPVersion.HTTP1:
+                http_version = "1.1"
+            case HTTPVersion.HTTP2:
+                http_version = "2"
+            case HTTPVersion.HTTP3:
+                http_version = "3"
+            case _:
+                http_version = "1.1"
+        scope: HTTPScope = {
+            "type": "http",
+            "asgi": _asgi,
+            "http_version": http_version,
+            "method": request.method,
+            "scheme": parsed_url.scheme,
+            "path": path,
+            "raw_path": raw_path.encode(),
+            "query_string": parsed_url.query.encode(),
+            "headers": [
+                (k.lower().encode("utf-8"), v.encode("utf-8"))
+                for k, v in request.headers.items()
+            ],
+            "server": (
+                parsed_url.hostname or "",
+                parsed_url.port or (443 if parsed_url.scheme == "https" else 80),
+            ),
+            "client": self._client,
+            "extensions": _extensions,
+            "state": self._state,
+            "root_path": "",
+        }
+
+        receive_queue: asyncio.Queue[bytes | Exception | None] = asyncio.Queue(1)
+
+        async def read_request_content() -> None:
+            try:
+                async for chunk in request.content:
+                    if not isinstance(chunk, bytes):
+                        msg = "Request not bytes object"
+                        raise WriteError(msg)  # noqa: TRY301
+                    await receive_queue.put(chunk)
+                await receive_queue.put(None)
+            except Exception as e:
+                await receive_queue.put(e)
+            finally:
+                try:
+                    aclose = request.content.aclose  # pyright: ignore[reportAttributeAccessIssue]
+                except AttributeError:
+                    pass
+                else:
+                    await aclose()
+
+        # Need a separate task to read the request body to allow
+        # cancelling when response closes.
+        request_task = asyncio.create_task(read_request_content())
+
+        async def receive() -> ASGIReceiveEvent:
+            chunk = await receive_queue.get()
+            if chunk is None:
+                return {"type": "http.request", "body": b"", "more_body": False}
+            if isinstance(chunk, Exception):
+                if self._http_version != HTTPVersion.HTTP2:
+                    msg = f"Request failed: {chunk}"
+                else:
+                    # With HTTP/2, reqwest seems to squash the original error message.
+                    msg = "Request failed: stream error sent by user"
+                raise WriteError(msg) from chunk
+            if isinstance(chunk, BaseException):
+                raise chunk
+            return {"type": "http.request", "body": chunk, "more_body": True}
+
+        send_queue: asyncio.Queue[ASGISendEvent | Exception] = asyncio.Queue()
+
+        async def send(message: ASGISendEvent) -> None:
+            await send_queue.put(message)
+
+        async def run_app() -> None:
+            try:
+                await self._app(scope, receive, send)
+            except asyncio.TimeoutError as e:
+                send_queue.put_nowait(TimeoutError(str(e)))
+            except Exception as e:
+                send_queue.put_nowait(e)
+
+        app_task = asyncio.create_task(run_app())
+        message = await send_queue.get()
+        if isinstance(message, Exception):
+            await app_task
+            if isinstance(message, TimeoutError):
+                raise message
+            return Response(
+                status=500,
+                http_version=self._http_version,
+                headers=Headers((("content-type", "text/plain"),)),
+                content=str(message).encode(),
+            )
+
+        assert message["type"] == "http.response.start"  # noqa: S101
+        status = message["status"]
+        headers = Headers(
+            (
+                (k.decode("utf-8"), v.decode("utf-8"))
+                for k, v in message.get("headers", [])
+            )
+        )
+        trailers = (
+            Headers()
+            if self._http_version == HTTPVersion.HTTP2
+            and request.headers.get("te") == "trailers"
+            else None
+        )
+        response_content = ResponseContent(
+            send_queue,
+            request_task,
+            trailers,
+            app_task,
+            read_trailers=message.get("trailers", False),
+        )
+        return Response(
+            status=status,
+            http_version=self._http_version,
+            headers=headers,
+            content=response_content,
+            trailers=trailers,
+        )
+
+    async def __aenter__(self) -> ASGITransport:
+        await self.run_lifespan()
+        return self
+
+    async def __aexit__(
+        self,
+        _exc_type: type[BaseException] | None,
+        _exc_value: BaseException | None,
+        _traceback: TracebackType | None,
+    ) -> None:
+        await self.close()
+
+    async def run_lifespan(self) -> None:
+        scope: LifespanScope = {"type": "lifespan", "asgi": _asgi, "state": self._state}
+
+        receive_queue: asyncio.Queue[LifespanStartupEvent | LifespanShutdownEvent] = (
+            asyncio.Queue()
+        )
+
+        async def receive() -> LifespanStartupEvent | LifespanShutdownEvent:
+            return await receive_queue.get()
+
+        send_queue: asyncio.Queue[ASGISendEvent | Exception] = asyncio.Queue()
+
+        async def send(message: ASGISendEvent) -> None:
+            await send_queue.put(message)
+
+        async def run_app() -> None:
+            try:
+                await self._app(scope, receive, send)
+            except Exception as e:
+                send_queue.put_nowait(e)
+
+        task = asyncio.create_task(run_app())
+
+        receive_queue.put_nowait({"type": "lifespan.startup"})
+        message = await send_queue.get()
+        if isinstance(message, Exception):
+            # Lifespan not supported
+            await task
+            return
+
+        self._lifespan = Lifespan(
+            task=task, receive_queue=receive_queue, send_queue=send_queue
+        )
+        match message["type"]:
+            case "lifespan.startup.complete":
+                return
+            case "lifespan.startup.failed":
+                msg = "ASGI application failed to start up"
+                raise RuntimeError(msg)
+
+    async def close(self) -> None:
+        if self._lifespan is None:
+            return
+
+        await self._lifespan.receive_queue.put({"type": "lifespan.shutdown"})
+        message = await self._lifespan.send_queue.get()
+        await self._lifespan.task
+        if isinstance(message, Exception):
+            raise message
+        match message["type"]:
+            case "lifespan.shutdown.complete":
+                return
+            case "lifespan.shutdown.failed":
+                msg = "ASGI application failed to shut down cleanly"
+                raise RuntimeError(msg)
+
+
+class CancelResponse(Exception):
+    pass
+
+
+class ResponseContent(AsyncIterator[bytes]):
+    def __init__(
+        self,
+        send_queue: asyncio.Queue[ASGISendEvent | Exception],
+        request_task: asyncio.Task[None],
+        trailers: Headers | None,
+        task: asyncio.Task[None],
+        *,
+        read_trailers: bool,
+    ) -> None:
+        self._send_queue = send_queue
+        self._request_task = request_task
+        self._trailers = trailers
+        self._task = task
+        self._read_trailers = read_trailers
+
+        self._read_pending = False
+        self._closed = False
+
+    def __aiter__(self) -> AsyncIterator[bytes]:
+        return self
+
+    async def __anext__(self) -> bytes:
+        if self._closed:
+            raise StopAsyncIteration
+        err: Exception | None = None
+        body: bytes | None = None
+        while True:
+            self._read_pending = True
+            try:
+                message = await self._send_queue.get()
+            finally:
+                self._read_pending = False
+            if isinstance(message, Exception):
+                match message:
+                    case CancelResponse():
+                        err = StopAsyncIteration()
+                        break
+                    case WriteError() | TimeoutError():
+                        err = message
+                        break
+                    case ReadError():
+                        raise message
+                    case Exception():
+                        msg = "Error reading response body"
+                        raise ReadError(msg) from message
+            match message["type"]:
+                case "http.response.body":
+                    if not message.get("more_body", False) and not self._read_trailers:
+                        await self._cleanup()
+                    if (body := message.get("body", b"")) or self._closed:
+                        return body
+                case "http.response.trailers":
+                    if self._trailers is not None:
+                        for k, v in message.get("headers", []):
+                            self._trailers.add(k.decode("utf-8"), v.decode("utf-8"))
+                    if not message.get("more_trailers", False):
+                        break
+        await self._cleanup()
+        if err:
+            raise err
+        raise StopAsyncIteration
+
+    async def aclose(self) -> None:
+        if self._closed:
+            return
+        self._closed = True
+        self._send_queue.put_nowait(ReadError("Response body read cancelled"))
+        await self._cleanup()
+
+    async def _cleanup(self) -> None:
+        self._closed = True
+        self._request_task.cancel()
+        with contextlib.suppress(BaseException):
+            await self._request_task
+        await self._task
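The ASGITransport docstring above describes lifespan handling through the async context manager. A minimal sketch of driving it against a trivial ASGI app follows; the pyqwest_asgi import path is an assumption (the diff does not name the package containing this module), and the Request("GET", url) call simply mirrors the positional pattern used in _transport.py above.

    import asyncio

    from pyqwest import Request

    # Hypothetical import path for the ASGITransport defined above.
    from pyqwest_asgi import ASGITransport


    async def app(scope, receive, send):
        # Tiny ASGI app: completes lifespan and answers every request with 200 "ok".
        if scope["type"] == "lifespan":
            while True:
                event = await receive()
                if event["type"] == "lifespan.startup":
                    await send({"type": "lifespan.startup.complete"})
                elif event["type"] == "lifespan.shutdown":
                    await send({"type": "lifespan.shutdown.complete"})
                    return
        else:
            await receive()
            await send({"type": "http.response.start", "status": 200, "headers": []})
            await send({"type": "http.response.body", "body": b"ok", "more_body": False})


    async def main() -> None:
        # Entering the context runs lifespan startup; exiting runs shutdown.
        async with ASGITransport(app) as transport:
            # Constructing a Request without a body is assumed to be allowed here.
            response = await transport.execute(Request("GET", "http://testserver/"))
            print(response.status)
            async for chunk in response.content:
                print(chunk)


    asyncio.run(main())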