python-http_request 0.0.5.3__tar.gz → 0.0.7.1__tar.gz
This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/PKG-INFO +7 -8
- python_http_request-0.0.7.1/http_request/__init__.py +273 -0
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/pyproject.toml +7 -7
- python_http_request-0.0.5.3/http_request/__init__.py +0 -169
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/LICENSE +0 -0
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/http_request/py.typed +0 -0
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/readme.md +0 -0
{python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/PKG-INFO

@@ -1,30 +1,29 @@
 Metadata-Version: 2.1
 Name: python-http_request
-Version: 0.0.5.3
+Version: 0.0.7.1
 Summary: Python http response utils.
 Home-page: https://github.com/ChenyangGao/web-mount-packs/tree/main/python-module/python-http_request
 License: MIT
 Keywords: http,request
 Author: ChenyangGao
 Author-email: wosiwujm@gmail.com
-Requires-Python: >=3.10,<4.0
+Requires-Python: >=3.12,<4.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Software Development
 Classifier: Topic :: Software Development :: Libraries
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Dist: integer_tool
-Requires-Dist: python-asynctools
-Requires-Dist: python-filewrap (>=0.
-Requires-Dist: python-texttools
+Requires-Dist: integer_tool (>=0.0.2)
+Requires-Dist: python-asynctools (>=0.0.10)
+Requires-Dist: python-filewrap (>=0.2.6)
+Requires-Dist: python-texttools (>=0.0.3)
 Project-URL: Repository, https://github.com/ChenyangGao/web-mount-packs/tree/main/python-module/python-http_request
 Description-Content-Type: text/markdown
 
python_http_request-0.0.7.1/http_request/__init__.py

@@ -0,0 +1,273 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+
+__author__ = "ChenyangGao <https://chenyanggao.github.io>"
+__version__ = (0, 0, 7)
+__all__ = [
+    "SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict", "headers_str_to_dict",
+    "encode_multipart_data", "encode_multipart_data_async",
+]
+
+from collections.abc import AsyncIterable, AsyncIterator, Buffer, ItemsView, Iterable, Iterator, Mapping
+from itertools import chain
+from mimetypes import guess_type
+from os import fsdecode
+from os.path import basename
+from re import compile as re_compile, Pattern
+from typing import runtime_checkable, Any, Final, Protocol, TypeVar
+from urllib.parse import quote, urlsplit, urlunsplit
+from uuid import uuid4
+
+from asynctools import ensure_aiter, async_chain
+from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
+from integer_tool import int_to_bytes
+from texttools import text_to_dict
+
+
+AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
+
+CRE_URL_SCHEME_match: Final = re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match
+
+
+@runtime_checkable
+class SupportsGeturl(Protocol[AnyStr]):
+    def geturl(self) -> AnyStr: ...
+
+
+def url_origin(url: str, /, default_port: int = 0) -> str:
+    if url.startswith("/"):
+        url = "http://localhost" + url
+    elif url.startswith("//"):
+        url = "http:" + url
+    elif url.startswith("://"):
+        url = "http" + url
+    elif not CRE_URL_SCHEME_match(url):
+        url = "http://" + url
+    urlp = urlsplit(url)
+    scheme, netloc = urlp.scheme or "http", urlp.netloc or "localhost"
+    if default_port and not urlp.port:
+        netloc = netloc.removesuffix(":") + f":{default_port}"
+    return f"{scheme}://{netloc}"
+
+
+def complete_url(url: str, /, default_port: int = 0) -> str:
+    if url.startswith("/"):
+        url = "http://localhost" + url
+    elif url.startswith("//"):
+        url = "http:" + url
+    elif url.startswith("://"):
+        url = "http" + url
+    elif not CRE_URL_SCHEME_match(url):
+        url = "http://" + url
+    urlp = urlsplit(url)
+    repl = {"query": "", "fragment": ""}
+    if not urlp.scheme:
+        repl["scheme"] = "http"
+    netloc = urlp.netloc
+    if not netloc:
+        netloc = "localhost"
+    if default_port and not urlp.port:
+        netloc = netloc.removesuffix(":") + f":{default_port}"
+    repl["netloc"] = netloc
+    return urlunsplit(urlp._replace(**repl)).rstrip("/")
+
+
+def cookies_str_to_dict(
+    cookies: str,
+    /,
+    kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
+    entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
+) -> dict[str, str]:
+    return text_to_dict(cookies.strip(), kv_sep, entry_sep)
+
+
+def headers_str_to_dict(
+    headers: str,
+    /,
+    kv_sep: str | Pattern[str] = re_compile(r":\s+"),
+    entry_sep: str | Pattern[str] = re_compile("\n+"),
+) -> dict[str, str]:
+    return text_to_dict(headers.strip(), kv_sep, entry_sep)
+
+
+def ensure_bytes(s, /) -> bytes:
+    if isinstance(s, bytes):
+        return s
+    elif isinstance(s, memoryview):
+        return s.tobytes()
+    elif isinstance(s, Buffer):
+        return bytes(s)
+    if isinstance(s, int):
+        return int_to_bytes(s)
+    elif isinstance(s, str):
+        return bytes(s, "utf-8")
+    try:
+        return bytes(s)
+    except TypeError:
+        return bytes(str(s), "utf-8")
+
+
+def encode_multipart_data(
+    data: None | Mapping[str, Any] = None,
+    files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer]] = None,
+    boundary: None | str = None,
+    file_suffix: str = "",
+) -> tuple[dict, Iterator[Buffer]]:
+    if not boundary:
+        boundary = uuid4().hex
+    suffix = bytes(file_suffix, "ascii")
+    if suffix and not suffix.startswith(b"."):
+        suffix = b"." + suffix
+    headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
+
+    def encode_data(data) -> Iterator[Buffer]:
+        if not data:
+            return
+        if isinstance(data, Mapping):
+            data = ItemsView(data)
+        for name, value in data:
+            yield boundary_line
+            yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
+            yield ensure_bytes(value)
+            yield b"\r\n"
+
+    def encode_files(files) -> Iterator[Buffer]:
+        if not files:
+            return
+        if isinstance(files, Mapping):
+            files = ItemsView(files)
+        for name, file in files:
+            headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
+            filename: bytes | str = ""
+            if isinstance(file, (list, tuple)):
+                match file:
+                    case [file]:
+                        pass
+                    case [file_name, file]:
+                        pass
+                    case [file_name, file, file_type]:
+                        if file_type:
+                            headers[b"Content-Type"] = ensure_bytes(file_type)
+                    case [file_name, file, file_type, file_headers, *rest]:
+                        if isinstance(file_headers, Mapping):
+                            file_headers = ItemsView(file_headers)
+                        for k, v in file_headers:
+                            headers[ensure_bytes(k).title()] = ensure_bytes(v)
+                        if file_type:
+                            headers[b"Content-Type"] = ensure_bytes(file_type)
+            if isinstance(file, Buffer):
+                pass
+            elif isinstance(file, str):
+                file = file.encode("utf-8")
+            elif hasattr(file, "read"):
+                file = bio_chunk_iter(file)
+                if not filename:
+                    path = getattr(file, "name", None)
+                    if path:
+                        filename = basename(path)
+                        if b"Content-Type" not in headers:
+                            headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
+            if filename:
+                name = bytes(quote(filename), "ascii")
+                if not name.endswith(suffix):
+                    name += suffix
+                headers[b"Content-Disposition"] += b'; filename="%s"' % name
+            else:
+                headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
+            yield boundary_line
+            for entry in headers.items():
+                yield b"%s: %s\r\n" % entry
+            yield b"\r\n"
+            if isinstance(file, Buffer):
+                yield file
+            else:
+                yield from file
+            yield b"\r\n"
+
+    boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
+    return headers, chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
+
+
+def encode_multipart_data_async(
+    data: None | Mapping[str, Any] = None,
+    files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer] | AsyncIterable[Buffer]] = None,
+    boundary: None | str = None,
+    file_suffix: str = "",
+) -> tuple[dict, AsyncIterator[Buffer]]:
+    if not boundary:
+        boundary = uuid4().hex
+    suffix = bytes(file_suffix, "ascii")
+    if suffix and not suffix.startswith(b"."):
+        suffix = b"." + suffix
+    headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
+
+    async def encode_data(data) -> AsyncIterator[Buffer]:
+        if not data:
+            return
+        if isinstance(data, Mapping):
+            data = ItemsView(data)
+        for name, value in data:
+            yield boundary_line
+            yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
+            yield ensure_bytes(value)
+            yield b"\r\n"
+
+    async def encode_files(files) -> AsyncIterator[Buffer]:
+        if not files:
+            return
+        if isinstance(files, Mapping):
+            files = ItemsView(files)
+        for name, file in files:
+            headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
+            filename: bytes | str = ""
+            if isinstance(file, (list, tuple)):
+                match file:
+                    case [file]:
+                        pass
+                    case [file_name, file]:
+                        pass
+                    case [file_name, file, file_type]:
+                        if file_type:
+                            headers[b"Content-Type"] = ensure_bytes(file_type)
+                    case [file_name, file, file_type, file_headers, *rest]:
+                        if isinstance(file_headers, Mapping):
+                            file_headers = ItemsView(file_headers)
+                        for k, v in file_headers:
+                            headers[ensure_bytes(k).title()] = ensure_bytes(v)
+                        if file_type:
+                            headers[b"Content-Type"] = ensure_bytes(file_type)
+            if isinstance(file, Buffer):
+                pass
+            elif isinstance(file, str):
+                file = file.encode("utf-8")
+            elif hasattr(file, "read"):
+                file = bio_chunk_async_iter(file)
+                if not filename:
+                    path = getattr(file, "name", None)
+                    if path:
+                        filename = basename(path)
+                        if b"Content-Type" not in headers:
+                            headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
+            else:
+                file = ensure_aiter(file)
+            if filename:
+                name = bytes(quote(filename), "ascii")
+                if not name.endswith(suffix):
+                    name += suffix
+                headers[b"Content-Disposition"] += b'; filename="%s"' % name
+            else:
+                headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
+            yield boundary_line
+            for entry in headers.items():
+                yield b"%s: %s\r\n" % entry
+            yield b"\r\n"
+            if isinstance(file, Buffer):
+                yield file
+            else:
+                async for chunk in file:
+                    yield chunk
+            yield b"\r\n"
+
+    boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
+    return headers, async_chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
+
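For orientation, here is a small usage sketch of the URL and header helpers added above. It is not taken from the package's own documentation; the sample URL, cookie string, and header text are invented, and the expected outputs in the comments simply follow the logic of the new code (the string splitting itself is delegated to texttools.text_to_dict).

# Illustrative only: exercises the helpers from the new http_request/__init__.py shown above.
from http_request import complete_url, cookies_str_to_dict, headers_str_to_dict, url_origin

# default_port (new in this release) is appended only when the URL carries no explicit port.
print(url_origin("example.com/path?q=1", default_port=443))
# expected: http://example.com:443

# complete_url() keeps scheme, host, and path but drops the query, fragment, and trailing slash.
print(complete_url("example.com/a/b/?q=1#frag"))
# expected: http://example.com/a/b

# The string-to-dict parsers split on the default separators defined above.
print(cookies_str_to_dict("a=1; b=2"))
# expected: {'a': '1', 'b': '2'}
print(headers_str_to_dict("Accept: */*\nUser-Agent: curl/8.0"))
# expected: {'Accept': '*/*', 'User-Agent': 'curl/8.0'}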
{python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "python-http_request"
-version = "0.0.5.3"
+version = "0.0.7.1"
 description = "Python http response utils."
 authors = ["ChenyangGao <wosiwujm@gmail.com>"]
 license = "MIT"
@@ -13,7 +13,7 @@ classifiers = [
     "Development Status :: 5 - Production/Stable",
     "Programming Language :: Python",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3 :: Only",
     "Operating System :: OS Independent",
     "Intended Audience :: Developers",
@@ -26,11 +26,11 @@ include = [
 ]
 
 [tool.poetry.dependencies]
-python = "^3.10"
-python-asynctools = "*"
-python-filewrap = ">=0.
-python-texttools = "*"
-integer_tool = "*"
+python = "^3.12"
+python-asynctools = ">=0.0.10"
+python-filewrap = ">=0.2.6"
+python-texttools = ">=0.0.3"
+integer_tool = ">=0.0.2"
 
 [build-system]
 requires = ["poetry-core"]
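One practical consequence of the metadata changes above: the classifiers drop Python 3.10 and 3.11 and add 3.13, and Requires-Python moves to >=3.12,<4.0, so downstream projects on older interpreters have to stay on the 0.0.5.x line. A hypothetical guard a consumer might add, not part of the package itself:

# Hypothetical downstream check, mirroring the Requires-Python bump declared in this release.
import sys

if sys.version_info < (3, 12):
    raise RuntimeError(
        "python-http_request 0.0.7.1 requires Python 3.12+; "
        "pin python-http_request==0.0.5.3 on older interpreters"
    )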
python_http_request-0.0.5.3/http_request/__init__.py

@@ -1,169 +0,0 @@
-#!/usr/bin/env python3
-# encoding: utf-8
-
-__author__ = "ChenyangGao <https://chenyanggao.github.io>"
-__version__ = (0, 0, 5)
-__all__ = [
-    "SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict", "headers_str_to_dict",
-    "encode_multipart_data", "encode_multipart_data_async",
-]
-
-from itertools import chain
-from collections.abc import AsyncIterable, AsyncIterator, ItemsView, Iterable, Iterator, Mapping
-from re import compile as re_compile, Pattern
-from typing import runtime_checkable, Any, Final, Protocol, TypeVar
-from urllib.parse import quote, urlsplit, urlunsplit
-from uuid import uuid4
-
-from asynctools import ensure_aiter, async_chain
-from filewrap import bio_chunk_iter, bio_chunk_async_iter, Buffer, SupportsRead
-from integer_tool import int_to_bytes
-from texttools import text_to_dict
-
-
-AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
-
-CRE_URL_SCHEME: Final = re_compile(r"^(?i:[a-z][a-z0-9.+-]*)://")
-
-
-@runtime_checkable
-class SupportsGeturl(Protocol[AnyStr]):
-    def geturl(self) -> AnyStr: ...
-
-
-def url_origin(url: str, /) -> str:
-    if url.startswith("://"):
-        url = "http" + url
-    elif CRE_URL_SCHEME.match(url) is None:
-        url = "http://" + url
-    urlp = urlsplit(url)
-    scheme, netloc = urlp.scheme, urlp.netloc
-    if not netloc:
-        netloc = "localhost"
-    return f"{scheme}://{netloc}"
-
-
-def complete_url(url: str, /) -> str:
-    if url.startswith("://"):
-        url = "http" + url
-    elif CRE_URL_SCHEME.match(url) is None:
-        url = "http://" + url
-    urlp = urlsplit(url)
-    repl = {"query": "", "fragment": ""}
-    if not urlp.netloc:
-        repl["path"] = "localhost"
-    return urlunsplit(urlp._replace(**repl)).rstrip("/")
-
-
-def cookies_str_to_dict(
-    cookies: str,
-    /,
-    kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
-    entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
-) -> dict[str, str]:
-    return text_to_dict(cookies.strip(), kv_sep, entry_sep)
-
-
-def headers_str_to_dict(
-    headers: str,
-    /,
-    kv_sep: str | Pattern[str] = re_compile(r":\s+"),
-    entry_sep: str | Pattern[str] = re_compile("\n+"),
-) -> dict[str, str]:
-    return text_to_dict(headers.strip(), kv_sep, entry_sep)
-
-
-def ensure_bytes(s, /) -> Buffer:
-    if isinstance(s, Buffer):
-        return s
-    if isinstance(s, int):
-        return int_to_bytes(s)
-    elif isinstance(s, str):
-        return bytes(s, "utf-8")
-    try:
-        return bytes(s)
-    except TypeError:
-        return bytes(str(s), "utf-8")
-
-
-def encode_multipart_data(
-    data: None | Mapping[str, Any] = None,
-    files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer]] = None,
-    boundary: None | str = None,
-) -> tuple[dict, Iterator[Buffer]]:
-    if not boundary:
-        boundary = uuid4().bytes.hex()
-    headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
-
-    def encode_data(data) -> Iterator[Buffer]:
-        if not data:
-            return
-        if isinstance(data, Mapping):
-            data = ItemsView(data)
-        for name, value in data:
-            yield boundary_line
-            yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
-            yield ensure_bytes(value)
-            yield b"\r\n"
-
-    def encode_files(files) -> Iterator[Buffer]:
-        if not files:
-            return
-        if isinstance(files, Mapping):
-            files = ItemsView(files)
-        for name, file in files:
-            yield boundary_line
-            yield b'Content-Disposition: form-data; name="%s"\r\nContent-Type: application/octet-stream\r\n\r\n' % bytes(quote(name), "ascii")
-            if isinstance(file, Buffer):
-                yield file
-            elif hasattr(file, "read"):
-                yield from bio_chunk_iter(file)
-            else:
-                yield from file
-            yield b"\r\n"
-
-    boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
-    return headers, chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
-
-
-def encode_multipart_data_async(
-    data: None | Mapping[str, Any] = None,
-    files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer] | AsyncIterable[Buffer]] = None,
-    boundary: None | str = None,
-) -> tuple[dict, AsyncIterator[Buffer]]:
-    if not boundary:
-        boundary = uuid4().bytes.hex()
-    headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
-
-    async def encode_data(data) -> AsyncIterator[Buffer]:
-        if not data:
-            return
-        if isinstance(data, Mapping):
-            data = ItemsView(data)
-        for name, value in data:
-            yield boundary_line
-            yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
-            yield ensure_bytes(value)
-            yield b"\r\n"
-
-    async def encode_files(files) -> AsyncIterator[Buffer]:
-        if not files:
-            return
-        if isinstance(files, Mapping):
-            files = ItemsView(files)
-        for name, file in files:
-            yield boundary_line
-            yield b'Content-Disposition: form-data; name="%s"\r\nContent-Type: application/octet-stream\r\n\r\n' % bytes(quote(name), "ascii")
-            if isinstance(file, Buffer):
-                yield file
-            elif hasattr(file, "read"):
-                async for b in bio_chunk_async_iter(file):
-                    yield b
-            else:
-                async for b in ensure_aiter(file):
-                    yield b
-            yield b"\r\n"
-
-    boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
-    return headers, async_chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
-
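The most visible behavioral change between the removed module above and the new one is in the multipart encoder: the old encode_files() wrote a fixed Content-Type: application/octet-stream and no filename for every file part, while the new version accepts per-part tuples and a file_suffix argument, emits a per-part Content-Type, and adds a filename attribute. Below is a minimal sketch of driving the new encoder; the field names and payloads are invented, and sending the body over a real transport is left out.

# Illustrative only: consumes the (headers, chunk iterator) pair returned by the new encoder.
from http_request import encode_multipart_data

headers, chunks = encode_multipart_data(
    data={"title": "weekly report"},
    # Tuple form matched by the new encode_files(): (name, payload, content type).
    files={"upload": ("report.txt", b"hello world", "text/plain")},
    file_suffix="txt",  # new parameter: appended to derived or generated filenames
)

body = b"".join(chunks)          # or stream the chunks one by one to the transport
print(headers["Content-Type"])   # multipart/form-data; boundary=<32 hex digits>
print(body.decode("utf-8", "replace"))

encode_multipart_data_async() mirrors this interface and additionally accepts async-iterable payloads.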
Files without changes:

- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/LICENSE
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/http_request/py.typed
- {python_http_request-0.0.5.3 → python_http_request-0.0.7.1}/readme.md