python-http_request 0.1.5__tar.gz → 0.1.6.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- python_http_request-0.1.6.1/PKG-INFO +90 -0
- {python_http_request-0.1.5 → python_http_request-0.1.6.1}/http_request/__init__.py +2 -2
- python_http_request-0.1.6.1/http_request/extension/__init__.py +4 -0
- python_http_request-0.1.6.1/http_request/extension/request.py +689 -0
- {python_http_request-0.1.5 → python_http_request-0.1.6.1}/pyproject.toml +7 -3
- python_http_request-0.1.6.1/readme.md +53 -0
- python_http_request-0.1.5/PKG-INFO +0 -48
- python_http_request-0.1.5/readme.md +0 -15
- {python_http_request-0.1.5 → python_http_request-0.1.6.1}/LICENSE +0 -0
- {python_http_request-0.1.5 → python_http_request-0.1.6.1}/http_request/py.typed +0 -0
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: python-http_request
|
|
3
|
+
Version: 0.1.6.1
|
|
4
|
+
Summary: Python http request utils.
|
|
5
|
+
Home-page: https://github.com/ChenyangGao/python-modules/tree/main/python-http_request
|
|
6
|
+
License: MIT
|
|
7
|
+
Keywords: http,request
|
|
8
|
+
Author: ChenyangGao
|
|
9
|
+
Author-email: wosiwujm@gmail.com
|
|
10
|
+
Requires-Python: >=3.12,<4.0
|
|
11
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Programming Language :: Python
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
19
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
20
|
+
Classifier: Topic :: Software Development
|
|
21
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
22
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
+
Requires-Dist: http_response (>=0.0.9)
|
|
24
|
+
Requires-Dist: orjson
|
|
25
|
+
Requires-Dist: python-argtools (>=0.0.2)
|
|
26
|
+
Requires-Dist: python-asynctools (>=0.1.3)
|
|
27
|
+
Requires-Dist: python-cookietools (>=0.1.4)
|
|
28
|
+
Requires-Dist: python-dicttools (>=0.0.4)
|
|
29
|
+
Requires-Dist: python-ensure (>=0.0.1)
|
|
30
|
+
Requires-Dist: python-filewrap (>=0.2.8)
|
|
31
|
+
Requires-Dist: python-texttools (>=0.0.5)
|
|
32
|
+
Requires-Dist: python-undefined (>=0.0.3)
|
|
33
|
+
Requires-Dist: yarl
|
|
34
|
+
Project-URL: Repository, https://github.com/ChenyangGao/python-modules/tree/main/python-http_request
|
|
35
|
+
Description-Content-Type: text/markdown
|
|
36
|
+
|
|
37
|
+
# Python http request utils.
|
|
38
|
+
|
|
39
|
+
## Installation
|
|
40
|
+
|
|
41
|
+
You can install from [pypi](https://pypi.org/project/python-http_request/)
|
|
42
|
+
|
|
43
|
+
```console
|
|
44
|
+
pip install -U python-http_request
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
## Usage
|
|
48
|
+
|
|
49
|
+
```python
|
|
50
|
+
import http_request
|
|
51
|
+
import http_request.extension
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
## Extension
|
|
55
|
+
|
|
56
|
+
I've implemented several modules, all of which provide a ``request`` function. Their signatures are similar, so they can be used as drop-in replacements for each other.
|
|
57
|
+
|
|
58
|
+
1. [aiohttp_client_request](https://pypi.org/project/aiohttp_client_request/)
|
|
59
|
+
1. [aiosonic_request](https://pypi.org/project/aiosonic_request/)
|
|
60
|
+
1. [asks_request](https://pypi.org/project/asks_request/)
|
|
61
|
+
1. [blacksheep_client_request](https://pypi.org/project/blacksheep_client_request/)
|
|
62
|
+
1. [curl_cffi_request](https://pypi.org/project/curl_cffi_request/)
|
|
63
|
+
1. [http_client_request](https://pypi.org/project/http_client_request/)
|
|
64
|
+
1. [httpcore_request](https://pypi.org/project/httpcore_request/)
|
|
65
|
+
1. [httpx_request](https://pypi.org/project/httpx_request/)
|
|
66
|
+
1. [pycurl_request](https://pypi.org/project/pycurl_request/)
|
|
67
|
+
1. [python-urlopen](https://pypi.org/project/python-urlopen/)
|
|
68
|
+
1. [requests_request](https://pypi.org/project/requests_request/)
|
|
69
|
+
1. [tornado_client_request](https://pypi.org/project/tornado_client_request/)
|
|
70
|
+
1. [urllib3_request](https://pypi.org/project/urllib3_request/)
|
|
71
|
+
|
|
72
|
+
To make it more general, I've encapsulated a ``request`` function
|
|
73
|
+
|
|
74
|
+
```python
|
|
75
|
+
from http_request.extension import request
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
You just need to implement a ``urlopen`` function pass to ``request``, then it can be directly extended. The ``urlopen`` function signature is roughly as follows:
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
def urlopen[Response](
|
|
82
|
+
url: str,
|
|
83
|
+
method: str,
|
|
84
|
+
data=None,
|
|
85
|
+
headers: None | dict[str, str] = None,
|
|
86
|
+
**request_args,
|
|
87
|
+
) -> Response:
|
|
88
|
+
...
|
|
89
|
+
```
|
|
90
|
+
|
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
# encoding: utf-8
|
|
3
3
|
|
|
4
4
|
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
-
__version__ = (0, 1,
|
|
5
|
+
__version__ = (0, 1, 6)
|
|
6
6
|
__all__ = [
|
|
7
7
|
"SupportsGeturl", "url_origin", "complete_url", "ensure_ascii_url",
|
|
8
8
|
"urlencode", "cookies_str_to_dict", "headers_str_to_dict_by_lines",
|
|
@@ -31,7 +31,6 @@ from typing import (
|
|
|
31
31
|
)
|
|
32
32
|
from urllib.parse import quote, urlparse, urlunparse
|
|
33
33
|
from uuid import uuid4
|
|
34
|
-
from yarl import URL
|
|
35
34
|
|
|
36
35
|
from asynctools import async_map
|
|
37
36
|
from dicttools import dict_map, iter_items
|
|
@@ -40,6 +39,7 @@ from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
|
40
39
|
from http_response import get_charset, get_mimetype
|
|
41
40
|
from orjson import dumps as json_dumps
|
|
42
41
|
from texttools import text_to_dict
|
|
42
|
+
from yarl import URL
|
|
43
43
|
|
|
44
44
|
|
|
45
45
|
type string = Buffer | str | UserString
|
|
@@ -0,0 +1,689 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# coding: utf-8
|
|
3
|
+
|
|
4
|
+
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
+
__all__ = ["HTTPError", "request"]
|
|
6
|
+
|
|
7
|
+
from asyncio import to_thread
|
|
8
|
+
from collections import UserString
|
|
9
|
+
from collections.abc import (
|
|
10
|
+
AsyncIterable, Awaitable, Buffer, Callable, Iterable,
|
|
11
|
+
Iterator, Mapping,
|
|
12
|
+
)
|
|
13
|
+
from http.client import HTTPConnection, HTTPSConnection, HTTPResponse
|
|
14
|
+
from http.cookiejar import CookieJar
|
|
15
|
+
from http.cookies import BaseCookie
|
|
16
|
+
from inspect import isawaitable, isgeneratorfunction
|
|
17
|
+
from os import PathLike
|
|
18
|
+
from sys import exc_info
|
|
19
|
+
from types import EllipsisType
|
|
20
|
+
from typing import cast, overload, Any, Literal
|
|
21
|
+
from urllib.parse import urljoin, urlsplit, urlunsplit
|
|
22
|
+
from warnings import warn
|
|
23
|
+
|
|
24
|
+
from argtools import argcount
|
|
25
|
+
from cookietools import update_cookies, cookies_to_str
|
|
26
|
+
from ensure import ensure_bytes, ensure_str
|
|
27
|
+
from dicttools import iter_items
|
|
28
|
+
from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
29
|
+
from http_response import (
|
|
30
|
+
get_status_code, headers_get, decompress_response, parse_response,
|
|
31
|
+
)
|
|
32
|
+
from yarl import URL
|
|
33
|
+
|
|
34
|
+
from .. import normalize_request_args, SupportsGeturl
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
type string = Buffer | str | UserString
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def get_headers(response, /):
|
|
41
|
+
if hasattr(response, "headers"):
|
|
42
|
+
headers = response.headers
|
|
43
|
+
if isinstance(headers, (Mapping, Iterable)):
|
|
44
|
+
return headers
|
|
45
|
+
if callable(headers):
|
|
46
|
+
headers = headers()
|
|
47
|
+
return headers
|
|
48
|
+
elif hasattr(response, "getheaders"):
|
|
49
|
+
return response.getheaders()
|
|
50
|
+
elif hasattr(response, "info"):
|
|
51
|
+
return response.info()
|
|
52
|
+
raise TypeError("can't read response headers")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def call_read(response, /):
|
|
56
|
+
if hasattr(response, "read"):
|
|
57
|
+
return response.read()
|
|
58
|
+
maybe_content_attrs = (
|
|
59
|
+
"content", "body", "iter_content", "iter_chunks",
|
|
60
|
+
"iter_chunked", "iter_bytes", "iter_lines",
|
|
61
|
+
)
|
|
62
|
+
if a := next((a for a in maybe_content_attrs if hasattr(response, a)), None):
|
|
63
|
+
content = getattr(response, a)
|
|
64
|
+
if callable(content):
|
|
65
|
+
content = content()
|
|
66
|
+
if isinstance(content, Iterator):
|
|
67
|
+
content = b"".join(map(ensure_bytes, content))
|
|
68
|
+
return content
|
|
69
|
+
raise TypeError("can't read response body")
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def call_close(response, /):
|
|
73
|
+
try:
|
|
74
|
+
if hasattr(response, "close"):
|
|
75
|
+
return response.close()
|
|
76
|
+
elif hasattr(response, "release"):
|
|
77
|
+
return response.release()
|
|
78
|
+
elif hasattr(response, "__exit__"):
|
|
79
|
+
return response.__exit__(**exc_info())
|
|
80
|
+
elif hasattr(response, "__del__"):
|
|
81
|
+
return response.__del__()
|
|
82
|
+
except Exception:
|
|
83
|
+
pass
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
async def call_async_read(response, /):
|
|
87
|
+
if hasattr(response, "aread"):
|
|
88
|
+
return await response.aread()
|
|
89
|
+
elif hasattr(response, "read"):
|
|
90
|
+
ret = response.read()
|
|
91
|
+
if isawaitable(ret):
|
|
92
|
+
ret = await ret
|
|
93
|
+
return ret
|
|
94
|
+
maybe_content_attrs = (
|
|
95
|
+
"async_content", "async_body", "async_iter_content", "async_iter_chunks",
|
|
96
|
+
"async_iter_chunked", "async_iter_bytes", "async_iter_lines",
|
|
97
|
+
"acontent", "abody", "aiter_content", "aiter_chunks", "aiter_chunked",
|
|
98
|
+
"aiter_bytes", "aiter_lines",
|
|
99
|
+
"content", "body", "iter_content", "iter_chunks", "iter_chunked",
|
|
100
|
+
"iter_bytes", "iter_lines",
|
|
101
|
+
)
|
|
102
|
+
if a := next((a for a in maybe_content_attrs if hasattr(response, a)), None):
|
|
103
|
+
content = getattr(response, a)
|
|
104
|
+
if callable(content):
|
|
105
|
+
content = content()
|
|
106
|
+
if isawaitable(content):
|
|
107
|
+
content = await content
|
|
108
|
+
if isinstance(content, AsyncIterable):
|
|
109
|
+
data = bytearray()
|
|
110
|
+
async for chunk in content:
|
|
111
|
+
data += chunk
|
|
112
|
+
content = data
|
|
113
|
+
elif isinstance(content, Iterator):
|
|
114
|
+
content = b"".join(map(ensure_bytes, content))
|
|
115
|
+
return content
|
|
116
|
+
raise TypeError("can't read response body")
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
async def call_async_close(response, /):
|
|
120
|
+
try:
|
|
121
|
+
if hasattr(response, "aclose"):
|
|
122
|
+
return await response.aclose()
|
|
123
|
+
elif hasattr(response, "close"):
|
|
124
|
+
ret = response.close()
|
|
125
|
+
if isawaitable(ret):
|
|
126
|
+
ret = await ret
|
|
127
|
+
return ret
|
|
128
|
+
elif hasattr(response, "async_release"):
|
|
129
|
+
return await response.async_release()
|
|
130
|
+
elif hasattr(response, "release"):
|
|
131
|
+
ret = response.release()
|
|
132
|
+
if isawaitable(ret):
|
|
133
|
+
ret = await ret
|
|
134
|
+
return ret
|
|
135
|
+
elif hasattr(response, "__aexit__"):
|
|
136
|
+
return await response.__aexit__(**exc_info())
|
|
137
|
+
elif hasattr(response, "__exit__"):
|
|
138
|
+
return response.__exit__(**exc_info())
|
|
139
|
+
elif hasattr(response, "__del__"):
|
|
140
|
+
return response.__del__()
|
|
141
|
+
except Exception:
|
|
142
|
+
pass
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def urlopen(
|
|
146
|
+
url: str,
|
|
147
|
+
method: str = "GET",
|
|
148
|
+
data=None,
|
|
149
|
+
headers=None,
|
|
150
|
+
**request_args,
|
|
151
|
+
) -> HTTPResponse:
|
|
152
|
+
urlp = urlsplit(url)
|
|
153
|
+
if urlp.scheme == "https":
|
|
154
|
+
con: HTTPConnection = HTTPSConnection(urlp.netloc)
|
|
155
|
+
else:
|
|
156
|
+
con = HTTPConnection(urlp.netloc)
|
|
157
|
+
con.request(
|
|
158
|
+
method,
|
|
159
|
+
urlunsplit(urlp._replace(scheme="", netloc="")),
|
|
160
|
+
data,
|
|
161
|
+
headers,
|
|
162
|
+
)
|
|
163
|
+
return con.getresponse()
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
urlopen_async = lambda *a, **k: to_thread(urlopen, *a, **k)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
class HTTPError(OSError):
|
|
170
|
+
|
|
171
|
+
def __init__(
|
|
172
|
+
self,
|
|
173
|
+
/,
|
|
174
|
+
*args,
|
|
175
|
+
url: str,
|
|
176
|
+
status: int,
|
|
177
|
+
method: str,
|
|
178
|
+
response,
|
|
179
|
+
):
|
|
180
|
+
super().__init__(*args)
|
|
181
|
+
self.url = url
|
|
182
|
+
self.status = status
|
|
183
|
+
self.method = method
|
|
184
|
+
self.response = response
|
|
185
|
+
|
|
186
|
+
def __repr__(self, /):
|
|
187
|
+
return f"{type(self).__module__}.{type(self).__qualname__}({self})"
|
|
188
|
+
|
|
189
|
+
def __str__(self):
|
|
190
|
+
args = ",".join(map(repr, self.args))
|
|
191
|
+
url = self.url
|
|
192
|
+
status = self.status
|
|
193
|
+
method = self.method
|
|
194
|
+
response = self.response
|
|
195
|
+
kwargs = f"{url=!r}, {status=!r}, {method=!r}, {response=!r}"
|
|
196
|
+
if args:
|
|
197
|
+
args += kwargs
|
|
198
|
+
else:
|
|
199
|
+
args = kwargs
|
|
200
|
+
return args
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
@overload
|
|
204
|
+
def request_sync[Response](
|
|
205
|
+
url: string | SupportsGeturl | URL,
|
|
206
|
+
method: string = "GET",
|
|
207
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
208
|
+
data: Any = None,
|
|
209
|
+
json: Any = None,
|
|
210
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
211
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
212
|
+
follow_redirects: bool = True,
|
|
213
|
+
raise_for_status: bool = True,
|
|
214
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
215
|
+
urlopen: Callable[..., Response] = urlopen, # type: ignore
|
|
216
|
+
dont_decompress: None | bool = None,
|
|
217
|
+
*,
|
|
218
|
+
parse: None | EllipsisType = None,
|
|
219
|
+
**request_kwargs,
|
|
220
|
+
) -> Response:
|
|
221
|
+
...
|
|
222
|
+
@overload
|
|
223
|
+
def request_sync[Response](
|
|
224
|
+
url: string | SupportsGeturl | URL,
|
|
225
|
+
method: string = "GET",
|
|
226
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
227
|
+
data: Any = None,
|
|
228
|
+
json: Any = None,
|
|
229
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
230
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
231
|
+
follow_redirects: bool = True,
|
|
232
|
+
raise_for_status: bool = True,
|
|
233
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
234
|
+
urlopen: Callable[..., Response] = urlopen, # type: ignore
|
|
235
|
+
dont_decompress: None | bool = None,
|
|
236
|
+
*,
|
|
237
|
+
parse: Literal[False],
|
|
238
|
+
**request_kwargs,
|
|
239
|
+
) -> bytes:
|
|
240
|
+
...
|
|
241
|
+
@overload
|
|
242
|
+
def request_sync[Response](
|
|
243
|
+
url: string | SupportsGeturl | URL,
|
|
244
|
+
method: string = "GET",
|
|
245
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
246
|
+
data: Any = None,
|
|
247
|
+
json: Any = None,
|
|
248
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
249
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
250
|
+
follow_redirects: bool = True,
|
|
251
|
+
raise_for_status: bool = True,
|
|
252
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
253
|
+
urlopen: Callable[..., Response] = urlopen, # type: ignore
|
|
254
|
+
dont_decompress: None | bool = None,
|
|
255
|
+
*,
|
|
256
|
+
parse: Literal[True],
|
|
257
|
+
**request_kwargs,
|
|
258
|
+
) -> bytes | str | dict | list | int | float | bool | None:
|
|
259
|
+
...
|
|
260
|
+
@overload
|
|
261
|
+
def request_sync[Response, T](
|
|
262
|
+
url: string | SupportsGeturl | URL,
|
|
263
|
+
method: string = "GET",
|
|
264
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
265
|
+
data: Any = None,
|
|
266
|
+
json: Any = None,
|
|
267
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
268
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
269
|
+
follow_redirects: bool = True,
|
|
270
|
+
raise_for_status: bool = True,
|
|
271
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
272
|
+
urlopen: Callable[..., Response] = urlopen, # type: ignore
|
|
273
|
+
dont_decompress: None | bool = None,
|
|
274
|
+
*,
|
|
275
|
+
parse: Callable[[Response, bytes], T] | Callable[[Response], T],
|
|
276
|
+
**request_kwargs,
|
|
277
|
+
) -> T:
|
|
278
|
+
...
|
|
279
|
+
def request_sync[Response, T](
|
|
280
|
+
url: string | SupportsGeturl | URL,
|
|
281
|
+
method: string = "GET",
|
|
282
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
283
|
+
data: Any = None,
|
|
284
|
+
json: Any = None,
|
|
285
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
286
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
287
|
+
follow_redirects: bool = True,
|
|
288
|
+
raise_for_status: bool = True,
|
|
289
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
290
|
+
urlopen: Callable[..., Response] = urlopen, # type: ignore
|
|
291
|
+
dont_decompress: None | bool = None,
|
|
292
|
+
*,
|
|
293
|
+
parse: None | EllipsisType| bool | Callable[[Response, bytes], T] | Callable[[Response], T] = None,
|
|
294
|
+
**request_kwargs,
|
|
295
|
+
) -> Response | bytes | str | dict | list | int | float | bool | None | T:
|
|
296
|
+
if isinstance(data, PathLike):
|
|
297
|
+
data = open(data, "rb")
|
|
298
|
+
body = data
|
|
299
|
+
request_kwargs.update(normalize_request_args(
|
|
300
|
+
method=method,
|
|
301
|
+
url=url,
|
|
302
|
+
params=params,
|
|
303
|
+
data=data,
|
|
304
|
+
json=json,
|
|
305
|
+
files=files,
|
|
306
|
+
headers=headers,
|
|
307
|
+
ensure_ascii=True,
|
|
308
|
+
ensure_bytes=True,
|
|
309
|
+
))
|
|
310
|
+
request_url: str = request_kwargs["url"]
|
|
311
|
+
headers = cast(dict, request_kwargs["headers"])
|
|
312
|
+
no_default_cookie_header = "cookie" not in headers
|
|
313
|
+
response_cookies = CookieJar()
|
|
314
|
+
while True:
|
|
315
|
+
if no_default_cookie_header:
|
|
316
|
+
headers["cookie"] = cookies_to_str(response_cookies if cookies is None else cookies, request_url)
|
|
317
|
+
response: Response = urlopen(**request_kwargs)
|
|
318
|
+
if hasattr(response, "cookies"):
|
|
319
|
+
response_cookies = response.cookies
|
|
320
|
+
if callable(response_cookies):
|
|
321
|
+
response_cookies = response_cookies()
|
|
322
|
+
if cookies is not None and response_cookies:
|
|
323
|
+
update_cookies(cookies, response_cookies) # type: ignore
|
|
324
|
+
else:
|
|
325
|
+
setattr(response, "cookies", response_cookies)
|
|
326
|
+
set_cookies: list[str] = []
|
|
327
|
+
if response_headers := get_headers(response):
|
|
328
|
+
set_cookies.extend(
|
|
329
|
+
v for k, v in iter_items(response_headers)
|
|
330
|
+
if v and ensure_str(k).lower() in ("set-cookie", "set-cookie2")
|
|
331
|
+
)
|
|
332
|
+
if set_cookies:
|
|
333
|
+
base_cookies: BaseCookie = BaseCookie()
|
|
334
|
+
for set_cookie in set_cookies:
|
|
335
|
+
base_cookies.load(set_cookie)
|
|
336
|
+
if cookies is not None:
|
|
337
|
+
update_cookies(cookies, base_cookies) # type: ignore
|
|
338
|
+
update_cookies(response_cookies, base_cookies)
|
|
339
|
+
status_code = get_status_code(response)
|
|
340
|
+
if status_code >= 400 and raise_for_status:
|
|
341
|
+
raise HTTPError(
|
|
342
|
+
url=request_kwargs["url"],
|
|
343
|
+
status=status_code,
|
|
344
|
+
method=request_kwargs["method"],
|
|
345
|
+
response=response,
|
|
346
|
+
)
|
|
347
|
+
elif 300 <= status_code < 400 and follow_redirects:
|
|
348
|
+
location = headers_get(response, "location")
|
|
349
|
+
if location and not isinstance(location, (Buffer, UserString, str)):
|
|
350
|
+
location = location[0]
|
|
351
|
+
if location:
|
|
352
|
+
location = ensure_str(location)
|
|
353
|
+
request_url = request_kwargs["url"] = urljoin(request_url, location)
|
|
354
|
+
if body and status_code in (307, 308):
|
|
355
|
+
if isinstance(body, SupportsRead):
|
|
356
|
+
try:
|
|
357
|
+
body.seek(0) # type: ignore
|
|
358
|
+
request_kwargs["data"] = bio_chunk_iter(body)
|
|
359
|
+
except Exception:
|
|
360
|
+
warn(f"unseekable-stream: {body!r}")
|
|
361
|
+
elif not isinstance(body, Buffer):
|
|
362
|
+
warn(f"failed to resend request body: {body!r}, when {status_code} redirects")
|
|
363
|
+
else:
|
|
364
|
+
if status_code == 303:
|
|
365
|
+
request_kwargs["method"] = "GET"
|
|
366
|
+
body = None
|
|
367
|
+
request_kwargs["data"] = None
|
|
368
|
+
if request_kwargs["method"] != "HEAD":
|
|
369
|
+
call_read(response)
|
|
370
|
+
call_close(response)
|
|
371
|
+
del response
|
|
372
|
+
continue
|
|
373
|
+
if parse is None:
|
|
374
|
+
return response
|
|
375
|
+
try:
|
|
376
|
+
if parse is ...:
|
|
377
|
+
return response
|
|
378
|
+
if isinstance(parse, bool):
|
|
379
|
+
ac = 2
|
|
380
|
+
if parse:
|
|
381
|
+
parse = cast(Callable[[Response, bytes], T], parse_response)
|
|
382
|
+
else:
|
|
383
|
+
parse = lambda _, content: content
|
|
384
|
+
else:
|
|
385
|
+
ac = argcount(parse)
|
|
386
|
+
if ac == 1:
|
|
387
|
+
return cast(Callable[[Response], T], parse)(response)
|
|
388
|
+
else:
|
|
389
|
+
content = call_read(response)
|
|
390
|
+
if not dont_decompress:
|
|
391
|
+
content = decompress_response(content, response)
|
|
392
|
+
return cast(Callable[[Response, bytes], T], parse)(response, content)
|
|
393
|
+
finally:
|
|
394
|
+
call_close(response)
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
@overload
|
|
398
|
+
async def request_async[Response](
|
|
399
|
+
url: string | SupportsGeturl | URL,
|
|
400
|
+
method: string = "GET",
|
|
401
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
402
|
+
data: Any = None,
|
|
403
|
+
json: Any = None,
|
|
404
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
405
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
406
|
+
follow_redirects: bool = True,
|
|
407
|
+
raise_for_status: bool = True,
|
|
408
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
409
|
+
urlopen: Callable[..., Response] = urlopen_async, # type: ignore
|
|
410
|
+
dont_decompress: None | bool = None,
|
|
411
|
+
*,
|
|
412
|
+
parse: None | EllipsisType = None,
|
|
413
|
+
**request_kwargs,
|
|
414
|
+
) -> Response:
|
|
415
|
+
...
|
|
416
|
+
@overload
|
|
417
|
+
async def request_async[Response](
|
|
418
|
+
url: string | SupportsGeturl | URL,
|
|
419
|
+
method: string = "GET",
|
|
420
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
421
|
+
data: Any = None,
|
|
422
|
+
json: Any = None,
|
|
423
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
424
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
425
|
+
follow_redirects: bool = True,
|
|
426
|
+
raise_for_status: bool = True,
|
|
427
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
428
|
+
urlopen: Callable[..., Response] = urlopen_async, # type: ignore
|
|
429
|
+
dont_decompress: None | bool = None,
|
|
430
|
+
*,
|
|
431
|
+
parse: Literal[False],
|
|
432
|
+
**request_kwargs,
|
|
433
|
+
) -> bytes:
|
|
434
|
+
...
|
|
435
|
+
@overload
|
|
436
|
+
async def request_async[Response](
|
|
437
|
+
url: string | SupportsGeturl | URL,
|
|
438
|
+
method: string = "GET",
|
|
439
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
440
|
+
data: Any = None,
|
|
441
|
+
json: Any = None,
|
|
442
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
443
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
444
|
+
follow_redirects: bool = True,
|
|
445
|
+
raise_for_status: bool = True,
|
|
446
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
447
|
+
urlopen: Callable[..., Response] = urlopen_async, # type: ignore
|
|
448
|
+
dont_decompress: None | bool = None,
|
|
449
|
+
*,
|
|
450
|
+
parse: Literal[True],
|
|
451
|
+
**request_kwargs,
|
|
452
|
+
) -> bytes | str | dict | list | int | float | bool | None:
|
|
453
|
+
...
|
|
454
|
+
@overload
|
|
455
|
+
async def request_async[Response, T](
|
|
456
|
+
url: string | SupportsGeturl | URL,
|
|
457
|
+
method: string = "GET",
|
|
458
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
459
|
+
data: Any = None,
|
|
460
|
+
json: Any = None,
|
|
461
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
462
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
463
|
+
follow_redirects: bool = True,
|
|
464
|
+
raise_for_status: bool = True,
|
|
465
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
466
|
+
urlopen: Callable[..., Response] = urlopen_async, # type: ignore
|
|
467
|
+
dont_decompress: None | bool = None,
|
|
468
|
+
*,
|
|
469
|
+
parse: Callable[[Response, bytes], T] | Callable[[Response], T],
|
|
470
|
+
**request_kwargs,
|
|
471
|
+
) -> T:
|
|
472
|
+
...
|
|
473
|
+
async def request_async[Response, T](
|
|
474
|
+
url: string | SupportsGeturl | URL,
|
|
475
|
+
method: string = "GET",
|
|
476
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
477
|
+
data: Any = None,
|
|
478
|
+
json: Any = None,
|
|
479
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
480
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
481
|
+
follow_redirects: bool = True,
|
|
482
|
+
raise_for_status: bool = True,
|
|
483
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
484
|
+
urlopen: Callable[..., Response] = urlopen_async, # type: ignore
|
|
485
|
+
dont_decompress: None | bool = None,
|
|
486
|
+
*,
|
|
487
|
+
parse: None | EllipsisType| bool | Callable[[Response, bytes], T] | Callable[[Response], T] = None,
|
|
488
|
+
**request_kwargs,
|
|
489
|
+
) -> Response | bytes | str | dict | list | int | float | bool | None | T:
|
|
490
|
+
if isinstance(data, PathLike):
|
|
491
|
+
data = open(data, "rb")
|
|
492
|
+
body = data
|
|
493
|
+
request_kwargs.update(normalize_request_args(
|
|
494
|
+
method=method,
|
|
495
|
+
url=url,
|
|
496
|
+
params=params,
|
|
497
|
+
data=data,
|
|
498
|
+
json=json,
|
|
499
|
+
files=files,
|
|
500
|
+
headers=headers,
|
|
501
|
+
ensure_ascii=True,
|
|
502
|
+
ensure_bytes=True,
|
|
503
|
+
))
|
|
504
|
+
request_url: str = request_kwargs["url"]
|
|
505
|
+
headers = cast(dict, request_kwargs["headers"])
|
|
506
|
+
no_default_cookie_header = "cookie" not in headers
|
|
507
|
+
response_cookies = CookieJar()
|
|
508
|
+
while True:
|
|
509
|
+
if no_default_cookie_header:
|
|
510
|
+
headers["cookie"] = cookies_to_str(response_cookies if cookies is None else cookies, request_url)
|
|
511
|
+
resp = urlopen(**request_kwargs)
|
|
512
|
+
if isawaitable(resp):
|
|
513
|
+
resp = await resp
|
|
514
|
+
response: Response = resp
|
|
515
|
+
if hasattr(response, "cookies"):
|
|
516
|
+
response_cookies = response.cookies
|
|
517
|
+
if callable(response_cookies):
|
|
518
|
+
response_cookies = response_cookies()
|
|
519
|
+
if cookies is not None and response_cookies:
|
|
520
|
+
update_cookies(cookies, response_cookies) # type: ignore
|
|
521
|
+
else:
|
|
522
|
+
setattr(response, "cookies", response_cookies)
|
|
523
|
+
set_cookies: list[str] = []
|
|
524
|
+
if response_headers := get_headers(response):
|
|
525
|
+
set_cookies.extend(
|
|
526
|
+
v for k, v in iter_items(response_headers)
|
|
527
|
+
if v and ensure_str(k).lower() in ("set-cookie", "set-cookie2")
|
|
528
|
+
)
|
|
529
|
+
if set_cookies:
|
|
530
|
+
base_cookies: BaseCookie = BaseCookie()
|
|
531
|
+
for set_cookie in set_cookies:
|
|
532
|
+
base_cookies.load(set_cookie)
|
|
533
|
+
if cookies is not None:
|
|
534
|
+
update_cookies(cookies, base_cookies) # type: ignore
|
|
535
|
+
update_cookies(response_cookies, base_cookies)
|
|
536
|
+
status_code = get_status_code(response)
|
|
537
|
+
if status_code >= 400 and raise_for_status:
|
|
538
|
+
raise HTTPError(
|
|
539
|
+
url=request_kwargs["url"],
|
|
540
|
+
status=status_code,
|
|
541
|
+
method=request_kwargs["method"],
|
|
542
|
+
response=response,
|
|
543
|
+
)
|
|
544
|
+
elif 300 <= status_code < 400 and follow_redirects:
|
|
545
|
+
location = headers_get(response, "location")
|
|
546
|
+
if location and not isinstance(location, (Buffer, UserString, str)):
|
|
547
|
+
location = location[0]
|
|
548
|
+
if location:
|
|
549
|
+
location = ensure_str(location)
|
|
550
|
+
request_url = request_kwargs["url"] = urljoin(request_url, location)
|
|
551
|
+
if body and status_code in (307, 308):
|
|
552
|
+
if isinstance(body, SupportsRead):
|
|
553
|
+
try:
|
|
554
|
+
from asynctools import ensure_async
|
|
555
|
+
await ensure_async(body.seek)(0) # type: ignore
|
|
556
|
+
request_kwargs["data"] = bio_chunk_async_iter(body)
|
|
557
|
+
except Exception:
|
|
558
|
+
warn(f"unseekable-stream: {body!r}")
|
|
559
|
+
elif not isinstance(body, Buffer):
|
|
560
|
+
warn(f"failed to resend request body: {body!r}, when {status_code} redirects")
|
|
561
|
+
else:
|
|
562
|
+
if status_code == 303:
|
|
563
|
+
request_kwargs["method"] = "GET"
|
|
564
|
+
body = None
|
|
565
|
+
request_kwargs["data"] = None
|
|
566
|
+
if request_kwargs["method"] != "HEAD":
|
|
567
|
+
await call_async_read(response)
|
|
568
|
+
await call_async_close(response)
|
|
569
|
+
del response
|
|
570
|
+
continue
|
|
571
|
+
if parse is None:
|
|
572
|
+
return response
|
|
573
|
+
try:
|
|
574
|
+
if parse is ...:
|
|
575
|
+
return response
|
|
576
|
+
if isinstance(parse, bool):
|
|
577
|
+
ac = 2
|
|
578
|
+
if parse:
|
|
579
|
+
parse = cast(Callable[[Response, bytes], T], parse_response)
|
|
580
|
+
else:
|
|
581
|
+
parse = lambda _, content: content
|
|
582
|
+
else:
|
|
583
|
+
ac = argcount(parse)
|
|
584
|
+
if ac == 1:
|
|
585
|
+
return cast(Callable[[Response], T], parse)(response)
|
|
586
|
+
else:
|
|
587
|
+
content = await call_async_read(response)
|
|
588
|
+
if not dont_decompress:
|
|
589
|
+
content = decompress_response(content, response)
|
|
590
|
+
return cast(Callable[[Response, bytes], T], parse)(response, content)
|
|
591
|
+
finally:
|
|
592
|
+
await call_async_close(response)
|
|
593
|
+
|
|
594
|
+
|
|
595
|
+
@overload
|
|
596
|
+
def request[Response, T](
|
|
597
|
+
url: string | SupportsGeturl | URL,
|
|
598
|
+
method: string = "GET",
|
|
599
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
600
|
+
data: Any = None,
|
|
601
|
+
json: Any = None,
|
|
602
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
603
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
604
|
+
follow_redirects: bool = True,
|
|
605
|
+
raise_for_status: bool = True,
|
|
606
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
607
|
+
urlopen: None | Callable[..., Response] = None,
|
|
608
|
+
dont_decompress: None | bool = None,
|
|
609
|
+
*,
|
|
610
|
+
parse: None | EllipsisType | bool | Callable[[Response, bytes], T] | Callable[[Response], T] = None,
|
|
611
|
+
async_: Literal[False] = False,
|
|
612
|
+
**request_kwargs,
|
|
613
|
+
) -> Response | bytes | str | dict | list | int | float | bool | None | T:
|
|
614
|
+
...
|
|
615
|
+
@overload
|
|
616
|
+
def request[Response, T](
|
|
617
|
+
url: string | SupportsGeturl | URL,
|
|
618
|
+
method: string = "GET",
|
|
619
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
620
|
+
data: Any = None,
|
|
621
|
+
json: Any = None,
|
|
622
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
623
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
624
|
+
follow_redirects: bool = True,
|
|
625
|
+
raise_for_status: bool = True,
|
|
626
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
627
|
+
urlopen: None | Callable[..., Response] = None,
|
|
628
|
+
dont_decompress: None | bool = None,
|
|
629
|
+
*,
|
|
630
|
+
parse: None | EllipsisType | bool | Callable[[Response, bytes], T] | Callable[[Response, bytes], Awaitable[T]] | Callable[[Response], T] | Callable[[Response], Awaitable[T]] = None,
|
|
631
|
+
async_: Literal[True],
|
|
632
|
+
**request_kwargs,
|
|
633
|
+
) -> Awaitable[Response | bytes | str | dict | list | int | float | bool | None | T]:
|
|
634
|
+
...
|
|
635
|
+
def request[Response, T](
|
|
636
|
+
url: string | SupportsGeturl | URL,
|
|
637
|
+
method: string = "GET",
|
|
638
|
+
params: None | string | Mapping | Iterable[tuple[Any, Any]] = None,
|
|
639
|
+
data: Any = None,
|
|
640
|
+
json: Any = None,
|
|
641
|
+
files: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
|
|
642
|
+
headers: None | Mapping[string, string] | Iterable[tuple[string, string]] = None,
|
|
643
|
+
follow_redirects: bool = True,
|
|
644
|
+
raise_for_status: bool = True,
|
|
645
|
+
cookies: None | CookieJar | BaseCookie = None,
|
|
646
|
+
urlopen: None | Callable[..., Response] = None,
|
|
647
|
+
dont_decompress: None | bool = None,
|
|
648
|
+
*,
|
|
649
|
+
parse: None | EllipsisType | bool | Callable[[Response, bytes], T] | Callable[[Response, bytes], Awaitable[T]] | Callable[[Response], T] | Callable[[Response], Awaitable[T]] = None,
|
|
650
|
+
async_: Literal[False, True] = False,
|
|
651
|
+
**request_kwargs,
|
|
652
|
+
) -> Response | bytes | str | dict | list | int | float | bool | None | T | Awaitable[Response | bytes | str | dict | list | int | float | bool | None | T]:
|
|
653
|
+
if callable(urlopen):
|
|
654
|
+
if isgeneratorfunction(urlopen):
|
|
655
|
+
async_ = True
|
|
656
|
+
request_kwargs["urlopen"] = urlopen
|
|
657
|
+
if async_:
|
|
658
|
+
return request_async(
|
|
659
|
+
url=url,
|
|
660
|
+
method=method,
|
|
661
|
+
params=params,
|
|
662
|
+
data=data,
|
|
663
|
+
json=json,
|
|
664
|
+
files=files,
|
|
665
|
+
headers=headers,
|
|
666
|
+
follow_redirects=follow_redirects,
|
|
667
|
+
raise_for_status=raise_for_status,
|
|
668
|
+
cookies=cookies,
|
|
669
|
+
dont_decompress=dont_decompress,
|
|
670
|
+
parse=parse, # type: ignore
|
|
671
|
+
**request_kwargs,
|
|
672
|
+
)
|
|
673
|
+
else:
|
|
674
|
+
return request_sync(
|
|
675
|
+
url=url,
|
|
676
|
+
method=method,
|
|
677
|
+
params=params,
|
|
678
|
+
data=data,
|
|
679
|
+
json=json,
|
|
680
|
+
files=files,
|
|
681
|
+
headers=headers,
|
|
682
|
+
follow_redirects=follow_redirects,
|
|
683
|
+
raise_for_status=raise_for_status,
|
|
684
|
+
cookies=cookies,
|
|
685
|
+
dont_decompress=dont_decompress,
|
|
686
|
+
parse=parse, # type: ignore
|
|
687
|
+
**request_kwargs,
|
|
688
|
+
)
|
|
689
|
+
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
[tool.poetry]
|
|
2
2
|
name = "python-http_request"
|
|
3
|
-
version = "0.1.
|
|
4
|
-
description = "Python http
|
|
3
|
+
version = "0.1.6.1"
|
|
4
|
+
description = "Python http request utils."
|
|
5
5
|
authors = ["ChenyangGao <wosiwujm@gmail.com>"]
|
|
6
6
|
license = "MIT"
|
|
7
7
|
readme = "readme.md"
|
|
@@ -29,11 +29,15 @@ include = [
|
|
|
29
29
|
python = "^3.12"
|
|
30
30
|
http_response = ">=0.0.9"
|
|
31
31
|
orjson = "*"
|
|
32
|
+
python-argtools = ">=0.0.2"
|
|
32
33
|
python-asynctools = ">=0.1.3"
|
|
34
|
+
python-cookietools = ">=0.1.4"
|
|
33
35
|
python-dicttools = ">=0.0.4"
|
|
34
36
|
python-ensure = ">=0.0.1"
|
|
35
37
|
python-filewrap = ">=0.2.8"
|
|
36
|
-
python-texttools = ">=0.0.
|
|
38
|
+
python-texttools = ">=0.0.5"
|
|
39
|
+
python-undefined = ">=0.0.3"
|
|
40
|
+
yarl = "*"
|
|
37
41
|
|
|
38
42
|
[build-system]
|
|
39
43
|
requires = ["poetry-core"]
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
# Python http request utils.
|
|
2
|
+
|
|
3
|
+
## Installation
|
|
4
|
+
|
|
5
|
+
You can install from [pypi](https://pypi.org/project/python-http_request/)
|
|
6
|
+
|
|
7
|
+
```console
|
|
8
|
+
pip install -U python-http_request
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
```python
|
|
14
|
+
import http_request
|
|
15
|
+
import http_request.extension
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Extension
|
|
19
|
+
|
|
20
|
+
I've implemented several modules, all of which provide a ``request`` function. Their signatures are similar, so they can be used as drop-in replacements for each other.
|
|
21
|
+
|
|
22
|
+
1. [aiohttp_client_request](https://pypi.org/project/aiohttp_client_request/)
|
|
23
|
+
1. [aiosonic_request](https://pypi.org/project/aiosonic_request/)
|
|
24
|
+
1. [asks_request](https://pypi.org/project/asks_request/)
|
|
25
|
+
1. [blacksheep_client_request](https://pypi.org/project/blacksheep_client_request/)
|
|
26
|
+
1. [curl_cffi_request](https://pypi.org/project/curl_cffi_request/)
|
|
27
|
+
1. [http_client_request](https://pypi.org/project/http_client_request/)
|
|
28
|
+
1. [httpcore_request](https://pypi.org/project/httpcore_request/)
|
|
29
|
+
1. [httpx_request](https://pypi.org/project/httpx_request/)
|
|
30
|
+
1. [pycurl_request](https://pypi.org/project/pycurl_request/)
|
|
31
|
+
1. [python-urlopen](https://pypi.org/project/python-urlopen/)
|
|
32
|
+
1. [requests_request](https://pypi.org/project/requests_request/)
|
|
33
|
+
1. [tornado_client_request](https://pypi.org/project/tornado_client_request/)
|
|
34
|
+
1. [urllib3_request](https://pypi.org/project/urllib3_request/)
|
|
35
|
+
|
|
36
|
+
To make it more general, I've encapsulated a ``request`` function
|
|
37
|
+
|
|
38
|
+
```python
|
|
39
|
+
from http_request.extension import request
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
You just need to implement a ``urlopen`` function and pass it to ``request``; it can then be extended directly. The ``urlopen`` function signature is roughly as follows:
|
|
43
|
+
|
|
44
|
+
```python
|
|
45
|
+
def urlopen[Response](
|
|
46
|
+
url: str,
|
|
47
|
+
method: str,
|
|
48
|
+
data=None,
|
|
49
|
+
headers: None | dict[str, str] = None,
|
|
50
|
+
**request_args,
|
|
51
|
+
) -> Response:
|
|
52
|
+
...
|
|
53
|
+
```
|
|
@@ -1,48 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.1
|
|
2
|
-
Name: python-http_request
|
|
3
|
-
Version: 0.1.5
|
|
4
|
-
Summary: Python http response utils.
|
|
5
|
-
Home-page: https://github.com/ChenyangGao/python-modules/tree/main/python-http_request
|
|
6
|
-
License: MIT
|
|
7
|
-
Keywords: http,request
|
|
8
|
-
Author: ChenyangGao
|
|
9
|
-
Author-email: wosiwujm@gmail.com
|
|
10
|
-
Requires-Python: >=3.12,<4.0
|
|
11
|
-
Classifier: Development Status :: 5 - Production/Stable
|
|
12
|
-
Classifier: Intended Audience :: Developers
|
|
13
|
-
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
-
Classifier: Operating System :: OS Independent
|
|
15
|
-
Classifier: Programming Language :: Python
|
|
16
|
-
Classifier: Programming Language :: Python :: 3
|
|
17
|
-
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
-
Classifier: Programming Language :: Python :: 3.13
|
|
19
|
-
Classifier: Programming Language :: Python :: 3 :: Only
|
|
20
|
-
Classifier: Topic :: Software Development
|
|
21
|
-
Classifier: Topic :: Software Development :: Libraries
|
|
22
|
-
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
-
Requires-Dist: http_response (>=0.0.9)
|
|
24
|
-
Requires-Dist: orjson
|
|
25
|
-
Requires-Dist: python-asynctools (>=0.1.3)
|
|
26
|
-
Requires-Dist: python-dicttools (>=0.0.4)
|
|
27
|
-
Requires-Dist: python-ensure (>=0.0.1)
|
|
28
|
-
Requires-Dist: python-filewrap (>=0.2.8)
|
|
29
|
-
Requires-Dist: python-texttools (>=0.0.4)
|
|
30
|
-
Project-URL: Repository, https://github.com/ChenyangGao/python-modules/tree/main/python-http_request
|
|
31
|
-
Description-Content-Type: text/markdown
|
|
32
|
-
|
|
33
|
-
# Python http response utils.
|
|
34
|
-
|
|
35
|
-
## Installation
|
|
36
|
-
|
|
37
|
-
You can install from [pypi](https://pypi.org/project/python-http_request/)
|
|
38
|
-
|
|
39
|
-
```console
|
|
40
|
-
pip install -U python-http_request
|
|
41
|
-
```
|
|
42
|
-
|
|
43
|
-
## Usage
|
|
44
|
-
|
|
45
|
-
```python
|
|
46
|
-
import http_request
|
|
47
|
-
```
|
|
48
|
-
|
|
File without changes
|
|
File without changes
|