python-http_request 0.0.7.1__tar.gz → 0.0.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {python_http_request-0.0.7.1 → python_http_request-0.0.9}/PKG-INFO +10 -7
- python_http_request-0.0.9/http_request/__init__.py +449 -0
- {python_http_request-0.0.7.1 → python_http_request-0.0.9}/pyproject.toml +10 -7
- python_http_request-0.0.7.1/http_request/__init__.py +0 -273
- {python_http_request-0.0.7.1 → python_http_request-0.0.9}/LICENSE +0 -0
- {python_http_request-0.0.7.1 → python_http_request-0.0.9}/http_request/py.typed +0 -0
- {python_http_request-0.0.7.1 → python_http_request-0.0.9}/readme.md +0 -0
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: python-http_request
|
|
3
|
-
Version: 0.0.
|
|
3
|
+
Version: 0.0.9
|
|
4
4
|
Summary: Python http response utils.
|
|
5
|
-
Home-page: https://github.com/ChenyangGao/
|
|
5
|
+
Home-page: https://github.com/ChenyangGao/python-modules/tree/main/python-http_request
|
|
6
6
|
License: MIT
|
|
7
7
|
Keywords: http,request
|
|
8
8
|
Author: ChenyangGao
|
|
@@ -20,11 +20,14 @@ Classifier: Programming Language :: Python :: 3 :: Only
|
|
|
20
20
|
Classifier: Topic :: Software Development
|
|
21
21
|
Classifier: Topic :: Software Development :: Libraries
|
|
22
22
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
-
Requires-Dist:
|
|
24
|
-
Requires-Dist:
|
|
25
|
-
Requires-Dist: python-
|
|
26
|
-
Requires-Dist: python-
|
|
27
|
-
|
|
23
|
+
Requires-Dist: http_response (>=0.0.4)
|
|
24
|
+
Requires-Dist: orjson
|
|
25
|
+
Requires-Dist: python-asynctools (>=0.1.3)
|
|
26
|
+
Requires-Dist: python-dicttools (>=0.0.1)
|
|
27
|
+
Requires-Dist: python-ensure (>=0.0.1)
|
|
28
|
+
Requires-Dist: python-filewrap (>=0.2.8)
|
|
29
|
+
Requires-Dist: python-texttools (>=0.0.4)
|
|
30
|
+
Project-URL: Repository, https://github.com/ChenyangGao/python-modules/tree/main/python-http_request
|
|
28
31
|
Description-Content-Type: text/markdown
|
|
29
32
|
|
|
30
33
|
# Python http response utils.
|
|
@@ -0,0 +1,449 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# encoding: utf-8
|
|
3
|
+
|
|
4
|
+
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
+
__version__ = (0, 0, 9)
|
|
6
|
+
__all__ = [
|
|
7
|
+
"SupportsGeturl", "url_origin", "complete_url", "ensure_ascii_url",
|
|
8
|
+
"urlencode", "cookies_str_to_dict", "headers_str_to_dict_by_lines",
|
|
9
|
+
"headers_str_to_dict", "encode_multipart_data", "encode_multipart_data_async",
|
|
10
|
+
"normalize_request_args",
|
|
11
|
+
]
|
|
12
|
+
|
|
13
|
+
from collections import UserString
|
|
14
|
+
from collections.abc import (
|
|
15
|
+
AsyncIterable, AsyncIterator, Buffer, Iterable, Iterator,
|
|
16
|
+
Mapping, Sequence,
|
|
17
|
+
)
|
|
18
|
+
from decimal import Decimal
|
|
19
|
+
from fractions import Fraction
|
|
20
|
+
from io import TextIOWrapper
|
|
21
|
+
from itertools import batched
|
|
22
|
+
from mimetypes import guess_type
|
|
23
|
+
from numbers import Integral, Real
|
|
24
|
+
from os import PathLike
|
|
25
|
+
from os.path import basename
|
|
26
|
+
from re import compile as re_compile, Pattern
|
|
27
|
+
from string import punctuation
|
|
28
|
+
from typing import runtime_checkable, Any, Final, Protocol, TypedDict
|
|
29
|
+
from urllib.parse import quote, urlparse, urlunparse
|
|
30
|
+
from uuid import uuid4
|
|
31
|
+
from yarl import URL
|
|
32
|
+
|
|
33
|
+
from asynctools import async_map
|
|
34
|
+
from dicttools import dict_map, iter_items
|
|
35
|
+
from ensure import ensure_bytes, ensure_buffer, ensure_str
|
|
36
|
+
from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
37
|
+
from http_response import get_charset, get_mimetype
|
|
38
|
+
from orjson import dumps as json_dumps
|
|
39
|
+
from texttools import text_to_dict
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# Union of the text-ish types accepted wherever a "string" is expected:
# bytes-like buffers, str, or collections.UserString.
type string = Buffer | str | UserString

# Translation table applied to query-string *keys*: percent-encodes the two
# characters ("&" and "=") that would otherwise break key/value parsing.
QUERY_KEY_TRANSTAB: Final = {k: f"%{k:02X}" for k in b"&="}
# Matcher for a leading URL scheme such as "http://" (case-insensitive).
CRE_URL_SCHEME_match: Final = re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class RequestArgs(TypedDict):
    """Normalized keyword arguments describing one HTTP request, as
    produced by :func:`normalize_request_args`."""
    # Upper-cased HTTP method, e.g. "GET".
    method: str
    # Completed absolute URL (extra params merged into the query string).
    url: str
    # Encoded request body: a single buffer or a (async-)iterable of buffers.
    data: Buffer | Iterable[Buffer] | AsyncIterable[Buffer]
    # Header mapping with lower-cased header names.
    headers: dict[str, str]
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@runtime_checkable
class SupportsGeturl[AnyStr: (bytes, str)](Protocol):
    """Structural type for any object exposing a ``geturl()`` accessor
    that returns its URL as ``bytes`` or ``str`` (e.g. ``urllib.parse``
    split/parse results)."""
    def geturl(self) -> AnyStr: ...
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def url_origin(url: str, /, default_port: int = 0) -> str:
    """Return the origin (``scheme://netloc``) of *url*.

    Partial URLs are completed first: a bare path gets a ``localhost``
    host, a scheme-relative or scheme-less URL gets ``http``.

    :param url: possibly partial URL.
    :param default_port: if non-zero and *url* carries no explicit port,
        this port is appended to the netloc.
    :return: ``"scheme://netloc"`` with no trailing path/query/fragment.
    """
    # BUG FIX: "//" must be tested before "/".  With the old order, a
    # scheme-relative URL like "//host/path" matched the "/" branch and
    # was rewritten to "http://localhost//host/path".
    if url.startswith("//"):
        url = "http:" + url
    elif url.startswith("/"):
        url = "http://localhost" + url
    elif url.startswith("://"):
        url = "http" + url
    elif not CRE_URL_SCHEME_match(url):
        url = "http://" + url
    urlp = urlparse(url)
    scheme, netloc = urlp.scheme or "http", urlp.netloc or "localhost"
    if default_port and not urlp.port:
        # Drop a dangling ":" (empty port) before appending the default.
        netloc = netloc.removesuffix(":") + f":{default_port}"
    return f"{scheme}://{netloc}"
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def complete_url(url: str, /, default_port: int = 0) -> str:
    """Complete a possibly partial *url* into an absolute base URL.

    Missing scheme defaults to ``http``, missing host to ``localhost``;
    the query string and fragment are stripped, as is a trailing "/".

    :param url: possibly partial URL.
    :param default_port: if non-zero and *url* carries no explicit port,
        this port is appended to the netloc.
    :return: absolute URL without query/fragment or trailing slash.
    """
    # BUG FIX: "//" must be tested before "/".  With the old order, a
    # scheme-relative URL like "//host/path" matched the "/" branch and
    # was rewritten to "http://localhost//host/path".
    if url.startswith("//"):
        url = "http:" + url
    elif url.startswith("/"):
        url = "http://localhost" + url
    elif url.startswith("://"):
        url = "http" + url
    elif not CRE_URL_SCHEME_match(url):
        url = "http://" + url
    urlp = urlparse(url)
    repl = {"query": "", "fragment": ""}
    if not urlp.scheme:
        repl["scheme"] = "http"
    netloc = urlp.netloc
    if not netloc:
        netloc = "localhost"
    if default_port and not urlp.port:
        # Drop a dangling ":" (empty port) before appending the default.
        netloc = netloc.removesuffix(":") + f":{default_port}"
    repl["netloc"] = netloc
    return urlunparse(urlp._replace(**repl)).rstrip("/")
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def ensure_ascii_url(url: str, /) -> str:
    """Percent-encode any non-ASCII characters in *url*.

    ASCII punctuation is kept verbatim (``safe=punctuation``), so the
    URL's structural characters (``:/?#&=`` …) are never escaped; an
    already-ASCII URL is returned unchanged.
    """
    return url if url.isascii() else quote(url, safe=punctuation)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def urlencode(
    payload: string | Mapping[Any, Any] | Iterable[tuple[Any, Any]],
    /, 
    encoding: str = "utf-8",
    errors: str = "strict",
) -> str:
    """Serialize *payload* into a query string.

    A ``str`` is returned as-is, a ``UserString``/buffer is converted to
    ``str``.  Otherwise the payload's items are joined as ``k=v`` pairs
    separated by "&": keys have "&" and "=" percent-escaped, values are
    rendered (``True``/``False``/``None`` → ``true``/``false``/``null``,
    mappings/iterables → JSON) with any "&" escaped as "%26".

    :param payload: text, mapping, or iterable of (key, value) pairs.
    :param encoding: codec used to decode bytes-like keys/values.
    :param errors: error handler for that decoding.
    :return: the encoded query string.
    """
    if isinstance(payload, str):
        return payload
    elif isinstance(payload, UserString):
        return str(payload)
    elif isinstance(payload, Buffer):
        return str(payload, encoding, errors)
    def encode_iter(payload: Iterable[tuple[Any, Any]], /) -> Iterator[str]:
        for i, (k, v) in enumerate(payload):
            if i:
                yield "&"
            if isinstance(k, Buffer):
                k = str(k, encoding, errors)
            else:
                k = str(k)
            yield k.translate(QUERY_KEY_TRANSTAB)
            yield "="
            # BUG FIX: normalize the value to a plain str and escape it at
            # the end, instead of yielding booleans/None directly — the old
            # code fell through to ``v.replace(...)`` with ``v`` still a
            # bool/None (AttributeError), and a yielded UserString would
            # break the final ``"".join``.
            if v is True:
                v = "true"
            elif v is False:
                v = "false"
            elif v is None:
                v = "null"
            elif isinstance(v, str):
                pass
            elif isinstance(v, UserString):
                v = str(v)
            elif isinstance(v, Buffer):
                v = str(v, encoding, errors)
            elif isinstance(v, (Mapping, Iterable)):
                v = json_dumps(v, default=json_default).decode("utf-8")
            else:
                v = str(v)
            yield v.replace("&", "%26")
    return "".join(encode_iter(iter_items(payload)))
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def cookies_str_to_dict(
    cookies: str,
    /, 
    kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
    entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
) -> dict[str, str]:
    """Parse a Cookie-header style string (``"k1=v1; k2=v2"``) into a dict.

    :param cookies: raw cookie string; surrounding whitespace is stripped.
    :param kv_sep: separator between a key and its value.
    :param entry_sep: separator between successive cookie entries.
    :return: mapping of cookie names to values (delegates to
        ``texttools.text_to_dict``; presumably later duplicates overwrite
        earlier ones — confirm against texttools).
    """
    return text_to_dict(cookies.strip(), kv_sep, entry_sep)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def headers_str_to_dict(
    headers: str,
    /, 
    kv_sep: str | Pattern[str] = re_compile(r":\s+"),
    entry_sep: str | Pattern[str] = re_compile("\n+"),
) -> dict[str, str]:
    """Parse a block of ``"Name: value"`` header lines into a dict.

    :param headers: raw header text; surrounding whitespace is stripped.
    :param kv_sep: separator between a header name and its value.
    :param entry_sep: separator between successive header lines.
    :return: mapping of header names to values (delegates to
        ``texttools.text_to_dict``; presumably later duplicates overwrite
        earlier ones — confirm against texttools).
    """
    return text_to_dict(headers.strip(), kv_sep, entry_sep)
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def headers_str_to_dict_by_lines(headers: str, /, ) -> dict[str, str]:
|
|
163
|
+
lines = headers.strip().split("\n")
|
|
164
|
+
if len(lines) & 1:
|
|
165
|
+
lines.append("")
|
|
166
|
+
return dict(batched(lines, 2)) # type: ignore
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def encode_multipart_data(
|
|
170
|
+
data: None | Mapping[Buffer | str, Any] = None,
|
|
171
|
+
files: None | Mapping[Buffer | str, Any] = None,
|
|
172
|
+
boundary: None | str = None,
|
|
173
|
+
file_suffix: str = "",
|
|
174
|
+
) -> tuple[dict, Iterator[Buffer]]:
|
|
175
|
+
if not boundary:
|
|
176
|
+
boundary = uuid4().hex
|
|
177
|
+
boundary_bytes = bytes(boundary, "ascii")
|
|
178
|
+
elif isinstance(boundary, str):
|
|
179
|
+
boundary_bytes = bytes(boundary, "latin-1")
|
|
180
|
+
else:
|
|
181
|
+
boundary_bytes = bytes(boundary)
|
|
182
|
+
boundary = str(boundary_bytes, "latin-1")
|
|
183
|
+
boundary_line = b"--%s\r\n" % boundary_bytes
|
|
184
|
+
suffix = ensure_bytes(file_suffix)
|
|
185
|
+
if suffix and not suffix.startswith(b"."):
|
|
186
|
+
suffix = b"." + suffix
|
|
187
|
+
|
|
188
|
+
def encode_item(name, value, /, is_file=False) -> Iterator[Buffer]:
|
|
189
|
+
headers = {b"content-disposition": b'form-data; name="%s"' % bytes(quote(name), "ascii")}
|
|
190
|
+
filename = b""
|
|
191
|
+
if isinstance(value, (list, tuple)):
|
|
192
|
+
match value:
|
|
193
|
+
case [value]:
|
|
194
|
+
pass
|
|
195
|
+
case [_, value]:
|
|
196
|
+
pass
|
|
197
|
+
case [_, value, file_type]:
|
|
198
|
+
if file_type:
|
|
199
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
200
|
+
case [_, value, file_type, file_headers, *rest]:
|
|
201
|
+
for k, v in iter_items(file_headers):
|
|
202
|
+
headers[ensure_bytes(k).lower()] = ensure_bytes(v)
|
|
203
|
+
if file_type:
|
|
204
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
205
|
+
if isinstance(value, (PathLike, SupportsRead)):
|
|
206
|
+
is_file = True
|
|
207
|
+
if isinstance(value, PathLike):
|
|
208
|
+
file: SupportsRead[Buffer] = open(value, "rb")
|
|
209
|
+
elif isinstance(value, TextIOWrapper):
|
|
210
|
+
file = value.buffer
|
|
211
|
+
else:
|
|
212
|
+
file = value
|
|
213
|
+
value = bio_chunk_iter(file)
|
|
214
|
+
if not filename:
|
|
215
|
+
filename = ensure_bytes(basename(getattr(file, "name", b"") or b""))
|
|
216
|
+
elif isinstance(value, Buffer):
|
|
217
|
+
pass
|
|
218
|
+
elif isinstance(value, (str, UserString)):
|
|
219
|
+
value = ensure_bytes(value)
|
|
220
|
+
elif isinstance(value, Iterable):
|
|
221
|
+
value = map(ensure_buffer, value)
|
|
222
|
+
else:
|
|
223
|
+
value = ensure_buffer(value)
|
|
224
|
+
if is_file:
|
|
225
|
+
if filename:
|
|
226
|
+
filename = bytes(quote(filename), "ascii")
|
|
227
|
+
if suffix and not filename.endswith(suffix):
|
|
228
|
+
filename += suffix
|
|
229
|
+
else:
|
|
230
|
+
filename = bytes(uuid4().hex, "ascii") + suffix
|
|
231
|
+
if b"content-type" not in headers:
|
|
232
|
+
headers[b"content-type"] = ensure_bytes(
|
|
233
|
+
guess_type(str(filename, "latin-1"))[0] or b"application/octet-stream")
|
|
234
|
+
headers[b"content-disposition"] += b'; filename="%s"' % filename
|
|
235
|
+
yield boundary_line
|
|
236
|
+
for entry in headers.items():
|
|
237
|
+
yield b"%s: %s\r\n" % entry
|
|
238
|
+
yield b"\r\n"
|
|
239
|
+
if isinstance(value, Buffer):
|
|
240
|
+
yield value
|
|
241
|
+
else:
|
|
242
|
+
yield from value
|
|
243
|
+
|
|
244
|
+
def encode_iter() -> Iterator[Buffer]:
|
|
245
|
+
if data:
|
|
246
|
+
for name, value in iter_items(data):
|
|
247
|
+
yield boundary_line
|
|
248
|
+
yield from encode_item(name, value)
|
|
249
|
+
yield b"\r\n"
|
|
250
|
+
if files:
|
|
251
|
+
for name, value in iter_items(files):
|
|
252
|
+
yield boundary_line
|
|
253
|
+
yield from encode_item(name, value, is_file=True)
|
|
254
|
+
yield b"\r\n"
|
|
255
|
+
yield b'--%s--\r\n' % boundary_bytes
|
|
256
|
+
|
|
257
|
+
return {"content-type": "multipart/form-data; boundary="+boundary}, encode_iter()
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def encode_multipart_data_async(
|
|
261
|
+
data: None | Mapping[Buffer | str, Any] = None,
|
|
262
|
+
files: None | Mapping[Buffer | str, Any] = None,
|
|
263
|
+
boundary: None | str = None,
|
|
264
|
+
file_suffix: str = "",
|
|
265
|
+
) -> tuple[dict, AsyncIterator[Buffer]]:
|
|
266
|
+
if not boundary:
|
|
267
|
+
boundary = uuid4().hex
|
|
268
|
+
boundary_bytes = bytes(boundary, "ascii")
|
|
269
|
+
elif isinstance(boundary, str):
|
|
270
|
+
boundary_bytes = bytes(boundary, "latin-1")
|
|
271
|
+
else:
|
|
272
|
+
boundary_bytes = bytes(boundary)
|
|
273
|
+
boundary = str(boundary_bytes, "latin-1")
|
|
274
|
+
boundary_line = b"--%s\r\n" % boundary_bytes
|
|
275
|
+
suffix = ensure_bytes(file_suffix)
|
|
276
|
+
if suffix and not suffix.startswith(b"."):
|
|
277
|
+
suffix = b"." + suffix
|
|
278
|
+
|
|
279
|
+
async def encode_item(name, value, /, is_file=False) -> AsyncIterator[Buffer]:
|
|
280
|
+
headers = {b"content-disposition": b'form-data; name="%s"' % bytes(quote(name), "ascii")}
|
|
281
|
+
filename = b""
|
|
282
|
+
if isinstance(value, (list, tuple)):
|
|
283
|
+
match value:
|
|
284
|
+
case [value]:
|
|
285
|
+
pass
|
|
286
|
+
case [_, value]:
|
|
287
|
+
pass
|
|
288
|
+
case [_, value, file_type]:
|
|
289
|
+
if file_type:
|
|
290
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
291
|
+
case [_, value, file_type, file_headers, *rest]:
|
|
292
|
+
for k, v in iter_items(file_headers):
|
|
293
|
+
headers[ensure_bytes(k).lower()] = ensure_bytes(v)
|
|
294
|
+
if file_type:
|
|
295
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
296
|
+
if isinstance(value, (PathLike, SupportsRead)):
|
|
297
|
+
is_file = True
|
|
298
|
+
if isinstance(value, PathLike):
|
|
299
|
+
file: SupportsRead[Buffer] = open(value, "rb")
|
|
300
|
+
elif isinstance(value, TextIOWrapper):
|
|
301
|
+
file = value.buffer
|
|
302
|
+
else:
|
|
303
|
+
file = value
|
|
304
|
+
value = bio_chunk_async_iter(file)
|
|
305
|
+
if not filename:
|
|
306
|
+
filename = ensure_bytes(basename(getattr(file, "name", b"") or b""))
|
|
307
|
+
elif isinstance(value, Buffer):
|
|
308
|
+
pass
|
|
309
|
+
elif isinstance(value, (str, UserString)):
|
|
310
|
+
value = ensure_bytes(value)
|
|
311
|
+
elif isinstance(value, Iterable):
|
|
312
|
+
value = async_map(ensure_buffer, value)
|
|
313
|
+
else:
|
|
314
|
+
value = ensure_buffer(value)
|
|
315
|
+
if is_file:
|
|
316
|
+
if filename:
|
|
317
|
+
filename = bytes(quote(filename), "ascii")
|
|
318
|
+
if suffix and not filename.endswith(suffix):
|
|
319
|
+
filename += suffix
|
|
320
|
+
else:
|
|
321
|
+
filename = bytes(uuid4().hex, "ascii") + suffix
|
|
322
|
+
if b"content-type" not in headers:
|
|
323
|
+
headers[b"content-type"] = ensure_bytes(
|
|
324
|
+
guess_type(str(filename, "latin-1"))[0] or b"application/octet-stream")
|
|
325
|
+
headers[b"content-disposition"] += b'; filename="%s"' % filename
|
|
326
|
+
yield boundary_line
|
|
327
|
+
for entry in headers.items():
|
|
328
|
+
yield b"%s: %s\r\n" % entry
|
|
329
|
+
yield b"\r\n"
|
|
330
|
+
if isinstance(value, Buffer):
|
|
331
|
+
yield value
|
|
332
|
+
elif isinstance(value, AsyncIterable):
|
|
333
|
+
async for line in value:
|
|
334
|
+
yield line
|
|
335
|
+
else:
|
|
336
|
+
for line in value:
|
|
337
|
+
yield line
|
|
338
|
+
|
|
339
|
+
async def encode_iter() -> AsyncIterator[Buffer]:
|
|
340
|
+
if data:
|
|
341
|
+
for name, value in iter_items(data):
|
|
342
|
+
yield boundary_line
|
|
343
|
+
async for line in encode_item(name, value):
|
|
344
|
+
yield line
|
|
345
|
+
yield b"\r\n"
|
|
346
|
+
if files:
|
|
347
|
+
for name, value in iter_items(files):
|
|
348
|
+
yield boundary_line
|
|
349
|
+
async for line in encode_item(name, value, is_file=True):
|
|
350
|
+
yield line
|
|
351
|
+
yield b"\r\n"
|
|
352
|
+
yield b'--%s--\r\n' % boundary_bytes
|
|
353
|
+
|
|
354
|
+
return {"content-type": "multipart/form-data; boundary="+boundary}, encode_iter()
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
def json_default(o, /):
|
|
358
|
+
if isinstance(o, Mapping):
|
|
359
|
+
return dict(o)
|
|
360
|
+
elif isinstance(o, Buffer):
|
|
361
|
+
return ensure_str(o)
|
|
362
|
+
elif isinstance(o, UserString):
|
|
363
|
+
return str(o)
|
|
364
|
+
elif isinstance(o, Integral):
|
|
365
|
+
return int(o)
|
|
366
|
+
elif isinstance(o, (Real, Fraction, Decimal)):
|
|
367
|
+
try:
|
|
368
|
+
return float(o)
|
|
369
|
+
except Exception:
|
|
370
|
+
return str(o)
|
|
371
|
+
elif isinstance(o, (Iterator, Sequence)):
|
|
372
|
+
return list(o)
|
|
373
|
+
else:
|
|
374
|
+
return str(o)
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
def normalize_request_args(
    method: string,
    url: string | SupportsGeturl | URL,
    params: Any = None,
    data: Any = None,
    json: Any = None,
    headers: None | Mapping[string, Any] | Iterable[tuple[string, Any]] = None,
    ensure_ascii: bool = False,
) -> RequestArgs:
    """Normalize heterogeneous request arguments into a :class:`RequestArgs`.

    - *method* is upper-cased.
    - *url* (text, ``geturl()`` object, or ``yarl.URL``) is completed to
      an absolute URL; *params* are urlencoded and merged into its query
      string; optionally percent-encoded to pure ASCII.
    - *headers* names are lower-cased and values coerced to ``str``.
    - *data* (preferred) or *json* is encoded into a bytes body, or left
      as a (async-)iterable of buffers, updating content-type as needed.

    :return: dict with ``method``, ``url``, ``data`` and ``headers`` keys.
    """
    method = ensure_str(method).upper()
    if isinstance(url, SupportsGeturl):
        url = url.geturl()
    elif isinstance(url, URL):
        url = str(url)
    url = complete_url(ensure_str(url))
    if params and (params := urlencode(params)):
        urlp = urlparse(url)
        if query := urlp.query:
            # Preserve any query already present on the URL.
            params = query + "&" + params
        url = urlunparse(urlp._replace(query=params))
    if ensure_ascii:
        url = ensure_ascii_url(url)
    headers_ = dict_map(
        headers or (),
        key=lambda k: ensure_str(k).lower(),
        value=ensure_str,
    )
    content_type = headers_.get("content-type", "")
    # NOTE(review): assumes get_charset falls back to a usable default
    # (presumably "utf-8") for an empty content-type — confirm.
    charset = get_charset(content_type)
    # BUG FIX: the mimetype must be parsed from the content-type header,
    # not from the charset (the old code passed ``charset`` here).
    mimetype = get_mimetype(content_type).lower()
    if data is not None:
        if isinstance(data, Buffer):
            pass
        elif isinstance(data, (str, UserString)):
            data = data.encode(charset)
        elif isinstance(data, AsyncIterable):
            data = async_map(ensure_buffer, data)
        elif isinstance(data, Iterator):
            data = map(ensure_buffer, data)
        elif mimetype == "application/json":
            # Caller explicitly declared a JSON content-type: serialize.
            if charset == "utf-8":
                data = json_dumps(data, default=json_default)
            else:
                from json import dumps
                data = dumps(data, default=json_default).encode(charset)
        elif isinstance(data, (Mapping, Sequence)):
            if data:
                data = urlencode(data, charset).encode(charset)
            if mimetype != "application/x-www-form-urlencoded":
                headers_["content-type"] = "application/x-www-form-urlencoded"
        else:
            data = str(data).encode(charset)
    elif json is not None:
        # BUG FIX: this branch previously serialized ``data`` (always None
        # here) instead of ``json``, contained a dead
        # ``isinstance(data, AsyncIterable)`` check, and re-serialized a
        # Buffer payload after assigning it.
        if isinstance(json, Buffer):
            data = json
        elif charset == "utf-8":
            data = json_dumps(json, default=json_default)
        else:
            from json import dumps
            data = dumps(json, default=json_default).encode(charset)
        if mimetype != "application/json":
            headers_["content-type"] = "application/json; charset=" + charset
    elif mimetype == "application/json":
        # Declared JSON but no payload at all: send an explicit null body.
        data = b"null"
    return {
        "url": url,
        "method": method,
        "data": data,
        "headers": headers_,
    }
|
|
449
|
+
|
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
[tool.poetry]
|
|
2
2
|
name = "python-http_request"
|
|
3
|
-
version = "0.0.
|
|
3
|
+
version = "0.0.9"
|
|
4
4
|
description = "Python http response utils."
|
|
5
5
|
authors = ["ChenyangGao <wosiwujm@gmail.com>"]
|
|
6
6
|
license = "MIT"
|
|
7
7
|
readme = "readme.md"
|
|
8
|
-
homepage = "https://github.com/ChenyangGao/
|
|
9
|
-
repository = "https://github.com/ChenyangGao/
|
|
8
|
+
homepage = "https://github.com/ChenyangGao/python-modules/tree/main/python-http_request"
|
|
9
|
+
repository = "https://github.com/ChenyangGao/python-modules/tree/main/python-http_request"
|
|
10
10
|
keywords = ["http", "request"]
|
|
11
11
|
classifiers = [
|
|
12
12
|
"License :: OSI Approved :: MIT License",
|
|
@@ -27,10 +27,13 @@ include = [
|
|
|
27
27
|
|
|
28
28
|
[tool.poetry.dependencies]
|
|
29
29
|
python = "^3.12"
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
python-
|
|
33
|
-
|
|
30
|
+
http_response = ">=0.0.4"
|
|
31
|
+
orjson = "*"
|
|
32
|
+
python-asynctools = ">=0.1.3"
|
|
33
|
+
python-dicttools = ">=0.0.1"
|
|
34
|
+
python-ensure = ">=0.0.1"
|
|
35
|
+
python-filewrap = ">=0.2.8"
|
|
36
|
+
python-texttools = ">=0.0.4"
|
|
34
37
|
|
|
35
38
|
[build-system]
|
|
36
39
|
requires = ["poetry-core"]
|
|
@@ -1,273 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
# encoding: utf-8
|
|
3
|
-
|
|
4
|
-
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
-
__version__ = (0, 0, 7)
|
|
6
|
-
__all__ = [
|
|
7
|
-
"SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict", "headers_str_to_dict",
|
|
8
|
-
"encode_multipart_data", "encode_multipart_data_async",
|
|
9
|
-
]
|
|
10
|
-
|
|
11
|
-
from collections.abc import AsyncIterable, AsyncIterator, Buffer, ItemsView, Iterable, Iterator, Mapping
|
|
12
|
-
from itertools import chain
|
|
13
|
-
from mimetypes import guess_type
|
|
14
|
-
from os import fsdecode
|
|
15
|
-
from os.path import basename
|
|
16
|
-
from re import compile as re_compile, Pattern
|
|
17
|
-
from typing import runtime_checkable, Any, Final, Protocol, TypeVar
|
|
18
|
-
from urllib.parse import quote, urlsplit, urlunsplit
|
|
19
|
-
from uuid import uuid4
|
|
20
|
-
|
|
21
|
-
from asynctools import ensure_aiter, async_chain
|
|
22
|
-
from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
23
|
-
from integer_tool import int_to_bytes
|
|
24
|
-
from texttools import text_to_dict
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
|
|
28
|
-
|
|
29
|
-
CRE_URL_SCHEME_match: Final = re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
@runtime_checkable
|
|
33
|
-
class SupportsGeturl(Protocol[AnyStr]):
|
|
34
|
-
def geturl(self) -> AnyStr: ...
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
def url_origin(url: str, /, default_port: int = 0) -> str:
|
|
38
|
-
if url.startswith("/"):
|
|
39
|
-
url = "http://localhost" + url
|
|
40
|
-
elif url.startswith("//"):
|
|
41
|
-
url = "http:" + url
|
|
42
|
-
elif url.startswith("://"):
|
|
43
|
-
url = "http" + url
|
|
44
|
-
elif not CRE_URL_SCHEME_match(url):
|
|
45
|
-
url = "http://" + url
|
|
46
|
-
urlp = urlsplit(url)
|
|
47
|
-
scheme, netloc = urlp.scheme or "http", urlp.netloc or "localhost"
|
|
48
|
-
if default_port and not urlp.port:
|
|
49
|
-
netloc = netloc.removesuffix(":") + f":{default_port}"
|
|
50
|
-
return f"{scheme}://{netloc}"
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
def complete_url(url: str, /, default_port: int = 0) -> str:
|
|
54
|
-
if url.startswith("/"):
|
|
55
|
-
url = "http://localhost" + url
|
|
56
|
-
elif url.startswith("//"):
|
|
57
|
-
url = "http:" + url
|
|
58
|
-
elif url.startswith("://"):
|
|
59
|
-
url = "http" + url
|
|
60
|
-
elif not CRE_URL_SCHEME_match(url):
|
|
61
|
-
url = "http://" + url
|
|
62
|
-
urlp = urlsplit(url)
|
|
63
|
-
repl = {"query": "", "fragment": ""}
|
|
64
|
-
if not urlp.scheme:
|
|
65
|
-
repl["scheme"] = "http"
|
|
66
|
-
netloc = urlp.netloc
|
|
67
|
-
if not netloc:
|
|
68
|
-
netloc = "localhost"
|
|
69
|
-
if default_port and not urlp.port:
|
|
70
|
-
netloc = netloc.removesuffix(":") + f":{default_port}"
|
|
71
|
-
repl["netloc"] = netloc
|
|
72
|
-
return urlunsplit(urlp._replace(**repl)).rstrip("/")
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
def cookies_str_to_dict(
|
|
76
|
-
cookies: str,
|
|
77
|
-
/,
|
|
78
|
-
kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
|
|
79
|
-
entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
|
|
80
|
-
) -> dict[str, str]:
|
|
81
|
-
return text_to_dict(cookies.strip(), kv_sep, entry_sep)
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
def headers_str_to_dict(
|
|
85
|
-
headers: str,
|
|
86
|
-
/,
|
|
87
|
-
kv_sep: str | Pattern[str] = re_compile(r":\s+"),
|
|
88
|
-
entry_sep: str | Pattern[str] = re_compile("\n+"),
|
|
89
|
-
) -> dict[str, str]:
|
|
90
|
-
return text_to_dict(headers.strip(), kv_sep, entry_sep)
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
def ensure_bytes(s, /) -> bytes:
|
|
94
|
-
if isinstance(s, bytes):
|
|
95
|
-
return s
|
|
96
|
-
elif isinstance(s, memoryview):
|
|
97
|
-
return s.tobytes()
|
|
98
|
-
elif isinstance(s, Buffer):
|
|
99
|
-
return bytes(s)
|
|
100
|
-
if isinstance(s, int):
|
|
101
|
-
return int_to_bytes(s)
|
|
102
|
-
elif isinstance(s, str):
|
|
103
|
-
return bytes(s, "utf-8")
|
|
104
|
-
try:
|
|
105
|
-
return bytes(s)
|
|
106
|
-
except TypeError:
|
|
107
|
-
return bytes(str(s), "utf-8")
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
def encode_multipart_data(
|
|
111
|
-
data: None | Mapping[str, Any] = None,
|
|
112
|
-
files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer]] = None,
|
|
113
|
-
boundary: None | str = None,
|
|
114
|
-
file_suffix: str = "",
|
|
115
|
-
) -> tuple[dict, Iterator[Buffer]]:
|
|
116
|
-
if not boundary:
|
|
117
|
-
boundary = uuid4().hex
|
|
118
|
-
suffix = bytes(file_suffix, "ascii")
|
|
119
|
-
if suffix and not suffix.startswith(b"."):
|
|
120
|
-
suffix = b"." + suffix
|
|
121
|
-
headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
|
|
122
|
-
|
|
123
|
-
def encode_data(data) -> Iterator[Buffer]:
|
|
124
|
-
if not data:
|
|
125
|
-
return
|
|
126
|
-
if isinstance(data, Mapping):
|
|
127
|
-
data = ItemsView(data)
|
|
128
|
-
for name, value in data:
|
|
129
|
-
yield boundary_line
|
|
130
|
-
yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
|
|
131
|
-
yield ensure_bytes(value)
|
|
132
|
-
yield b"\r\n"
|
|
133
|
-
|
|
134
|
-
def encode_files(files) -> Iterator[Buffer]:
|
|
135
|
-
if not files:
|
|
136
|
-
return
|
|
137
|
-
if isinstance(files, Mapping):
|
|
138
|
-
files = ItemsView(files)
|
|
139
|
-
for name, file in files:
|
|
140
|
-
headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
|
|
141
|
-
filename: bytes | str = ""
|
|
142
|
-
if isinstance(file, (list, tuple)):
|
|
143
|
-
match file:
|
|
144
|
-
case [file]:
|
|
145
|
-
pass
|
|
146
|
-
case [file_name, file]:
|
|
147
|
-
pass
|
|
148
|
-
case [file_name, file, file_type]:
|
|
149
|
-
if file_type:
|
|
150
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
151
|
-
case [file_name, file, file_type, file_headers, *rest]:
|
|
152
|
-
if isinstance(file_headers, Mapping):
|
|
153
|
-
file_headers = ItemsView(file_headers)
|
|
154
|
-
for k, v in file_headers:
|
|
155
|
-
headers[ensure_bytes(k).title()] = ensure_bytes(v)
|
|
156
|
-
if file_type:
|
|
157
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
158
|
-
if isinstance(file, Buffer):
|
|
159
|
-
pass
|
|
160
|
-
elif isinstance(file, str):
|
|
161
|
-
file = file.encode("utf-8")
|
|
162
|
-
elif hasattr(file, "read"):
|
|
163
|
-
file = bio_chunk_iter(file)
|
|
164
|
-
if not filename:
|
|
165
|
-
path = getattr(file, "name", None)
|
|
166
|
-
if path:
|
|
167
|
-
filename = basename(path)
|
|
168
|
-
if b"Content-Type" not in headers:
|
|
169
|
-
headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
|
|
170
|
-
if filename:
|
|
171
|
-
name = bytes(quote(filename), "ascii")
|
|
172
|
-
if not name.endswith(suffix):
|
|
173
|
-
name += suffix
|
|
174
|
-
headers[b"Content-Disposition"] += b'; filename="%s"' % name
|
|
175
|
-
else:
|
|
176
|
-
headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
|
|
177
|
-
yield boundary_line
|
|
178
|
-
for entry in headers.items():
|
|
179
|
-
yield b"%s: %s\r\n" % entry
|
|
180
|
-
yield b"\r\n"
|
|
181
|
-
if isinstance(file, Buffer):
|
|
182
|
-
yield file
|
|
183
|
-
else:
|
|
184
|
-
yield from file
|
|
185
|
-
yield b"\r\n"
|
|
186
|
-
|
|
187
|
-
boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
|
|
188
|
-
return headers, chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
def encode_multipart_data_async(
|
|
192
|
-
data: None | Mapping[str, Any] = None,
|
|
193
|
-
files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer] | AsyncIterable[Buffer]] = None,
|
|
194
|
-
boundary: None | str = None,
|
|
195
|
-
file_suffix: str = "",
|
|
196
|
-
) -> tuple[dict, AsyncIterator[Buffer]]:
|
|
197
|
-
if not boundary:
|
|
198
|
-
boundary = uuid4().hex
|
|
199
|
-
suffix = bytes(file_suffix, "ascii")
|
|
200
|
-
if suffix and not suffix.startswith(b"."):
|
|
201
|
-
suffix = b"." + suffix
|
|
202
|
-
headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
|
|
203
|
-
|
|
204
|
-
async def encode_data(data) -> AsyncIterator[Buffer]:
|
|
205
|
-
if not data:
|
|
206
|
-
return
|
|
207
|
-
if isinstance(data, Mapping):
|
|
208
|
-
data = ItemsView(data)
|
|
209
|
-
for name, value in data:
|
|
210
|
-
yield boundary_line
|
|
211
|
-
yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
|
|
212
|
-
yield ensure_bytes(value)
|
|
213
|
-
yield b"\r\n"
|
|
214
|
-
|
|
215
|
-
async def encode_files(files) -> AsyncIterator[Buffer]:
|
|
216
|
-
if not files:
|
|
217
|
-
return
|
|
218
|
-
if isinstance(files, Mapping):
|
|
219
|
-
files = ItemsView(files)
|
|
220
|
-
for name, file in files:
|
|
221
|
-
headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
|
|
222
|
-
filename: bytes | str = ""
|
|
223
|
-
if isinstance(file, (list, tuple)):
|
|
224
|
-
match file:
|
|
225
|
-
case [file]:
|
|
226
|
-
pass
|
|
227
|
-
case [file_name, file]:
|
|
228
|
-
pass
|
|
229
|
-
case [file_name, file, file_type]:
|
|
230
|
-
if file_type:
|
|
231
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
232
|
-
case [file_name, file, file_type, file_headers, *rest]:
|
|
233
|
-
if isinstance(file_headers, Mapping):
|
|
234
|
-
file_headers = ItemsView(file_headers)
|
|
235
|
-
for k, v in file_headers:
|
|
236
|
-
headers[ensure_bytes(k).title()] = ensure_bytes(v)
|
|
237
|
-
if file_type:
|
|
238
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
239
|
-
if isinstance(file, Buffer):
|
|
240
|
-
pass
|
|
241
|
-
elif isinstance(file, str):
|
|
242
|
-
file = file.encode("utf-8")
|
|
243
|
-
elif hasattr(file, "read"):
|
|
244
|
-
file = bio_chunk_async_iter(file)
|
|
245
|
-
if not filename:
|
|
246
|
-
path = getattr(file, "name", None)
|
|
247
|
-
if path:
|
|
248
|
-
filename = basename(path)
|
|
249
|
-
if b"Content-Type" not in headers:
|
|
250
|
-
headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
|
|
251
|
-
else:
|
|
252
|
-
file = ensure_aiter(file)
|
|
253
|
-
if filename:
|
|
254
|
-
name = bytes(quote(filename), "ascii")
|
|
255
|
-
if not name.endswith(suffix):
|
|
256
|
-
name += suffix
|
|
257
|
-
headers[b"Content-Disposition"] += b'; filename="%s"' % name
|
|
258
|
-
else:
|
|
259
|
-
headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
|
|
260
|
-
yield boundary_line
|
|
261
|
-
for entry in headers.items():
|
|
262
|
-
yield b"%s: %s\r\n" % entry
|
|
263
|
-
yield b"\r\n"
|
|
264
|
-
if isinstance(file, Buffer):
|
|
265
|
-
yield file
|
|
266
|
-
else:
|
|
267
|
-
async for chunk in file:
|
|
268
|
-
yield chunk
|
|
269
|
-
yield b"\r\n"
|
|
270
|
-
|
|
271
|
-
boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
|
|
272
|
-
return headers, async_chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
|
|
273
|
-
|
|
File without changes
|
|
File without changes
|
|
File without changes
|