python-http_request 0.0.7__tar.gz → 0.0.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {python_http_request-0.0.7 → python_http_request-0.0.8}/PKG-INFO +6 -5
- python_http_request-0.0.8/http_request/__init__.py +332 -0
- {python_http_request-0.0.7 → python_http_request-0.0.8}/pyproject.toml +6 -5
- python_http_request-0.0.7/http_request/__init__.py +0 -259
- {python_http_request-0.0.7 → python_http_request-0.0.8}/LICENSE +0 -0
- {python_http_request-0.0.7 → python_http_request-0.0.8}/http_request/py.typed +0 -0
- {python_http_request-0.0.7 → python_http_request-0.0.8}/readme.md +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: python-http_request
|
|
3
|
-
Version: 0.0.7
|
|
3
|
+
Version: 0.0.8
|
|
4
4
|
Summary: Python http response utils.
|
|
5
5
|
Home-page: https://github.com/ChenyangGao/web-mount-packs/tree/main/python-module/python-http_request
|
|
6
6
|
License: MIT
|
|
@@ -20,10 +20,11 @@ Classifier: Programming Language :: Python :: 3 :: Only
|
|
|
20
20
|
Classifier: Topic :: Software Development
|
|
21
21
|
Classifier: Topic :: Software Development :: Libraries
|
|
22
22
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
-
Requires-Dist: integer_tool (>=0.0.
|
|
24
|
-
Requires-Dist: python-asynctools (>=0.
|
|
25
|
-
Requires-Dist: python-
|
|
26
|
-
Requires-Dist: python-
|
|
23
|
+
Requires-Dist: integer_tool (>=0.0.5)
|
|
24
|
+
Requires-Dist: python-asynctools (>=0.1.3)
|
|
25
|
+
Requires-Dist: python-dicttools (>=0.0.1)
|
|
26
|
+
Requires-Dist: python-filewrap (>=0.2.8)
|
|
27
|
+
Requires-Dist: python-texttools (>=0.0.4)
|
|
27
28
|
Project-URL: Repository, https://github.com/ChenyangGao/web-mount-packs/tree/main/python-module/python-http_request
|
|
28
29
|
Description-Content-Type: text/markdown
|
|
29
30
|
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# encoding: utf-8
|
|
3
|
+
|
|
4
|
+
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
+
__version__ = (0, 0, 8)
|
|
6
|
+
__all__ = [
|
|
7
|
+
"SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict",
|
|
8
|
+
"headers_str_to_dict_by_lines", "headers_str_to_dict",
|
|
9
|
+
"encode_multipart_data", "encode_multipart_data_async",
|
|
10
|
+
]
|
|
11
|
+
|
|
12
|
+
from collections import UserString
|
|
13
|
+
from collections.abc import (
|
|
14
|
+
AsyncIterable, AsyncIterator, Buffer, Iterable, Iterator, Mapping,
|
|
15
|
+
)
|
|
16
|
+
from io import TextIOWrapper
|
|
17
|
+
from itertools import batched
|
|
18
|
+
from mimetypes import guess_type
|
|
19
|
+
from os import PathLike
|
|
20
|
+
from os.path import basename
|
|
21
|
+
from re import compile as re_compile, Pattern
|
|
22
|
+
from typing import runtime_checkable, Any, Final, Protocol, TypeVar
|
|
23
|
+
from urllib.parse import quote, urlsplit, urlunsplit
|
|
24
|
+
from uuid import uuid4
|
|
25
|
+
|
|
26
|
+
from asynctools import async_map
|
|
27
|
+
from dicttools import iter_items
|
|
28
|
+
from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
29
|
+
from integer_tool import int_to_bytes
|
|
30
|
+
from texttools import text_to_dict
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
|
|
34
|
+
|
|
35
|
+
CRE_URL_SCHEME_match: Final = re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@runtime_checkable
class SupportsGeturl(Protocol[AnyStr]):
    """Structural protocol for objects that expose a ``geturl()`` method
    returning ``str`` or ``bytes``; usable with ``isinstance`` at runtime
    thanks to ``@runtime_checkable``.
    """
    def geturl(self) -> AnyStr: ...
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def url_origin(url: str, /, default_port: int = 0) -> str:
    """Return the origin (``scheme://netloc``) of *url*.

    Missing pieces are filled with defaults: scheme ``http``, host
    ``localhost``, and — when *default_port* is non-zero — that port is
    appended if the URL carries none.

    :param url: A possibly partial URL ("/path", "//host/x", "://host",
        "host/x" or a full URL).
    :param default_port: Port to append when the URL has no explicit port
        (0 means leave the netloc untouched).
    :return: The normalized origin string.
    """
    # FIX: the "//" test must run before the bare "/" test; previously a
    # scheme-relative URL like "//host/path" matched startswith("/") first
    # and was rewritten to "http://localhost//host/path".
    if url.startswith("//"):
        url = "http:" + url
    elif url.startswith("://"):
        url = "http" + url
    elif url.startswith("/"):
        url = "http://localhost" + url
    elif not CRE_URL_SCHEME_match(url):
        url = "http://" + url
    urlp = urlsplit(url)
    scheme, netloc = urlp.scheme or "http", urlp.netloc or "localhost"
    if default_port and not urlp.port:
        # Drop a dangling ":" (empty port) before appending the default.
        netloc = netloc.removesuffix(":") + f":{default_port}"
    return f"{scheme}://{netloc}"
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def complete_url(url: str, /, default_port: int = 0) -> str:
    """Normalize *url* into a full URL with scheme and netloc, stripping the
    query, the fragment and any trailing slashes.

    :param url: A possibly partial URL ("/path", "//host/x", "://host",
        "host/x" or a full URL).
    :param default_port: Port to append when the URL has no explicit port
        (0 means leave the netloc untouched).
    :return: The completed URL string.
    """
    # FIX: the "//" test must run before the bare "/" test; previously a
    # scheme-relative URL like "//host/path" matched startswith("/") first
    # and was rewritten to "http://localhost//host/path".
    if url.startswith("//"):
        url = "http:" + url
    elif url.startswith("://"):
        url = "http" + url
    elif url.startswith("/"):
        url = "http://localhost" + url
    elif not CRE_URL_SCHEME_match(url):
        url = "http://" + url
    urlp = urlsplit(url)
    # Always blank out query and fragment; fill in scheme/netloc defaults.
    repl = {"query": "", "fragment": ""}
    if not urlp.scheme:
        repl["scheme"] = "http"
    netloc = urlp.netloc or "localhost"
    if default_port and not urlp.port:
        # Drop a dangling ":" (empty port) before appending the default.
        netloc = netloc.removesuffix(":") + f":{default_port}"
    repl["netloc"] = netloc
    return urlunsplit(urlp._replace(**repl)).rstrip("/")
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def cookies_str_to_dict(
    cookies: str,
    /,
    kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
    entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
) -> dict[str, str]:
    """Parse a ``Cookie``-header-style string into a dict.

    Whitespace around the input is trimmed, then parsing is delegated to
    ``text_to_dict`` using *entry_sep* to split entries and *kv_sep* to
    split each entry into key and value.
    """
    trimmed = cookies.strip()
    return text_to_dict(trimmed, kv_sep, entry_sep)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def headers_str_to_dict(
    headers: str,
    /,
    kv_sep: str | Pattern[str] = re_compile(r":\s+"),
    entry_sep: str | Pattern[str] = re_compile("\n+"),
) -> dict[str, str]:
    """Parse an HTTP-header-style text block into a dict.

    Whitespace around the input is trimmed, then parsing is delegated to
    ``text_to_dict`` using *entry_sep* to split lines and *kv_sep* to split
    each line into name and value.
    """
    trimmed = headers.strip()
    return text_to_dict(trimmed, kv_sep, entry_sep)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def headers_str_to_dict_by_lines(headers: str, /, ) -> dict[str, str]:
|
|
100
|
+
lines = headers.strip().split("\n")
|
|
101
|
+
if len(lines) & 1:
|
|
102
|
+
lines.append("")
|
|
103
|
+
return dict(batched(lines, 2)) # type: ignore
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def ensure_bytes(
|
|
107
|
+
o,
|
|
108
|
+
/,
|
|
109
|
+
encoding: str = "utf-8",
|
|
110
|
+
errors: str = "strict",
|
|
111
|
+
) -> bytes:
|
|
112
|
+
if isinstance(o, bytes):
|
|
113
|
+
return o
|
|
114
|
+
elif isinstance(o, memoryview):
|
|
115
|
+
return o.tobytes()
|
|
116
|
+
elif isinstance(o, Buffer):
|
|
117
|
+
return bytes(o)
|
|
118
|
+
elif isinstance(o, int):
|
|
119
|
+
return int_to_bytes(o)
|
|
120
|
+
elif isinstance(o, (str, UserString)):
|
|
121
|
+
return o.encode(encoding, errors)
|
|
122
|
+
try:
|
|
123
|
+
return bytes(o)
|
|
124
|
+
except TypeError:
|
|
125
|
+
return bytes(str(o), encoding, errors)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def ensure_buffer(
|
|
129
|
+
o,
|
|
130
|
+
/,
|
|
131
|
+
encoding: str = "utf-8",
|
|
132
|
+
errors: str = "strict",
|
|
133
|
+
) -> Buffer:
|
|
134
|
+
if isinstance(o, Buffer):
|
|
135
|
+
return o
|
|
136
|
+
elif isinstance(o, int):
|
|
137
|
+
return int_to_bytes(o)
|
|
138
|
+
elif isinstance(o, (str, UserString)):
|
|
139
|
+
return o.encode(encoding, errors)
|
|
140
|
+
try:
|
|
141
|
+
return bytes(o)
|
|
142
|
+
except TypeError:
|
|
143
|
+
return bytes(str(o), encoding, errors)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def encode_multipart_data(
|
|
147
|
+
data: None | Mapping[Buffer | str, Any] = None,
|
|
148
|
+
files: None | Mapping[Buffer | str, Any] = None,
|
|
149
|
+
boundary: None | str = None,
|
|
150
|
+
file_suffix: str = "",
|
|
151
|
+
) -> tuple[dict, Iterator[Buffer]]:
|
|
152
|
+
if not boundary:
|
|
153
|
+
boundary = uuid4().hex
|
|
154
|
+
boundary_bytes = bytes(boundary, "ascii")
|
|
155
|
+
elif isinstance(boundary, str):
|
|
156
|
+
boundary_bytes = bytes(boundary, "latin-1")
|
|
157
|
+
else:
|
|
158
|
+
boundary_bytes = bytes(boundary)
|
|
159
|
+
boundary = str(boundary_bytes, "latin-1")
|
|
160
|
+
boundary_line = b"--%s\r\n" % boundary_bytes
|
|
161
|
+
suffix = ensure_bytes(file_suffix)
|
|
162
|
+
if suffix and not suffix.startswith(b"."):
|
|
163
|
+
suffix = b"." + suffix
|
|
164
|
+
|
|
165
|
+
def encode_item(name, value, /, is_file=False) -> Iterator[Buffer]:
|
|
166
|
+
headers = {b"content-disposition": b'form-data; name="%s"' % bytes(quote(name), "ascii")}
|
|
167
|
+
filename = b""
|
|
168
|
+
if isinstance(value, (list, tuple)):
|
|
169
|
+
match value:
|
|
170
|
+
case [value]:
|
|
171
|
+
pass
|
|
172
|
+
case [_, value]:
|
|
173
|
+
pass
|
|
174
|
+
case [_, value, file_type]:
|
|
175
|
+
if file_type:
|
|
176
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
177
|
+
case [_, value, file_type, file_headers, *rest]:
|
|
178
|
+
for k, v in iter_items(file_headers):
|
|
179
|
+
headers[ensure_bytes(k).lower()] = ensure_bytes(v)
|
|
180
|
+
if file_type:
|
|
181
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
182
|
+
if isinstance(value, (PathLike, SupportsRead)):
|
|
183
|
+
is_file = True
|
|
184
|
+
if isinstance(value, PathLike):
|
|
185
|
+
file: SupportsRead[Buffer] = open(value, "rb")
|
|
186
|
+
elif isinstance(value, TextIOWrapper):
|
|
187
|
+
file = value.buffer
|
|
188
|
+
else:
|
|
189
|
+
file = value
|
|
190
|
+
value = bio_chunk_iter(file)
|
|
191
|
+
if not filename:
|
|
192
|
+
filename = ensure_bytes(basename(getattr(file, "name", b"") or b""))
|
|
193
|
+
elif isinstance(value, Buffer):
|
|
194
|
+
pass
|
|
195
|
+
elif isinstance(value, (str, UserString)):
|
|
196
|
+
value = ensure_bytes(value)
|
|
197
|
+
elif isinstance(value, Iterable):
|
|
198
|
+
value = map(ensure_buffer, value)
|
|
199
|
+
else:
|
|
200
|
+
value = ensure_buffer(value)
|
|
201
|
+
if is_file:
|
|
202
|
+
if filename:
|
|
203
|
+
filename = bytes(quote(filename), "ascii")
|
|
204
|
+
if suffix and not filename.endswith(suffix):
|
|
205
|
+
filename += suffix
|
|
206
|
+
else:
|
|
207
|
+
filename = bytes(uuid4().hex, "ascii") + suffix
|
|
208
|
+
if b"content-type" not in headers:
|
|
209
|
+
headers[b"content-type"] = ensure_bytes(
|
|
210
|
+
guess_type(str(filename, "latin-1"))[0] or b"application/octet-stream")
|
|
211
|
+
headers[b"content-disposition"] += b'; filename="%s"' % filename
|
|
212
|
+
yield boundary_line
|
|
213
|
+
for entry in headers.items():
|
|
214
|
+
yield b"%s: %s\r\n" % entry
|
|
215
|
+
yield b"\r\n"
|
|
216
|
+
if isinstance(value, Buffer):
|
|
217
|
+
yield value
|
|
218
|
+
else:
|
|
219
|
+
yield from value
|
|
220
|
+
|
|
221
|
+
def encode_iter() -> Iterator[Buffer]:
|
|
222
|
+
if data:
|
|
223
|
+
for name, value in iter_items(data):
|
|
224
|
+
yield boundary_line
|
|
225
|
+
yield from encode_item(name, value)
|
|
226
|
+
yield b"\r\n"
|
|
227
|
+
if files:
|
|
228
|
+
for name, value in iter_items(files):
|
|
229
|
+
yield boundary_line
|
|
230
|
+
yield from encode_item(name, value, is_file=True)
|
|
231
|
+
yield b"\r\n"
|
|
232
|
+
yield b'--%s--\r\n' % boundary_bytes
|
|
233
|
+
|
|
234
|
+
return {"content-type": "multipart/form-data; boundary="+boundary}, encode_iter()
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def encode_multipart_data_async(
|
|
238
|
+
data: None | Mapping[Buffer | str, Any] = None,
|
|
239
|
+
files: None | Mapping[Buffer | str, Any] = None,
|
|
240
|
+
boundary: None | str = None,
|
|
241
|
+
file_suffix: str = "",
|
|
242
|
+
) -> tuple[dict, AsyncIterator[Buffer]]:
|
|
243
|
+
if not boundary:
|
|
244
|
+
boundary = uuid4().hex
|
|
245
|
+
boundary_bytes = bytes(boundary, "ascii")
|
|
246
|
+
elif isinstance(boundary, str):
|
|
247
|
+
boundary_bytes = bytes(boundary, "latin-1")
|
|
248
|
+
else:
|
|
249
|
+
boundary_bytes = bytes(boundary)
|
|
250
|
+
boundary = str(boundary_bytes, "latin-1")
|
|
251
|
+
boundary_line = b"--%s\r\n" % boundary_bytes
|
|
252
|
+
suffix = ensure_bytes(file_suffix)
|
|
253
|
+
if suffix and not suffix.startswith(b"."):
|
|
254
|
+
suffix = b"." + suffix
|
|
255
|
+
|
|
256
|
+
async def encode_item(name, value, /, is_file=False) -> AsyncIterator[Buffer]:
|
|
257
|
+
headers = {b"content-disposition": b'form-data; name="%s"' % bytes(quote(name), "ascii")}
|
|
258
|
+
filename = b""
|
|
259
|
+
if isinstance(value, (list, tuple)):
|
|
260
|
+
match value:
|
|
261
|
+
case [value]:
|
|
262
|
+
pass
|
|
263
|
+
case [_, value]:
|
|
264
|
+
pass
|
|
265
|
+
case [_, value, file_type]:
|
|
266
|
+
if file_type:
|
|
267
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
268
|
+
case [_, value, file_type, file_headers, *rest]:
|
|
269
|
+
for k, v in iter_items(file_headers):
|
|
270
|
+
headers[ensure_bytes(k).lower()] = ensure_bytes(v)
|
|
271
|
+
if file_type:
|
|
272
|
+
headers[b"content-type"] = ensure_bytes(file_type)
|
|
273
|
+
if isinstance(value, (PathLike, SupportsRead)):
|
|
274
|
+
is_file = True
|
|
275
|
+
if isinstance(value, PathLike):
|
|
276
|
+
file: SupportsRead[Buffer] = open(value, "rb")
|
|
277
|
+
elif isinstance(value, TextIOWrapper):
|
|
278
|
+
file = value.buffer
|
|
279
|
+
else:
|
|
280
|
+
file = value
|
|
281
|
+
value = bio_chunk_async_iter(file)
|
|
282
|
+
if not filename:
|
|
283
|
+
filename = ensure_bytes(basename(getattr(file, "name", b"") or b""))
|
|
284
|
+
elif isinstance(value, Buffer):
|
|
285
|
+
pass
|
|
286
|
+
elif isinstance(value, (str, UserString)):
|
|
287
|
+
value = ensure_bytes(value)
|
|
288
|
+
elif isinstance(value, Iterable):
|
|
289
|
+
value = async_map(ensure_buffer, value)
|
|
290
|
+
else:
|
|
291
|
+
value = ensure_buffer(value)
|
|
292
|
+
if is_file:
|
|
293
|
+
if filename:
|
|
294
|
+
filename = bytes(quote(filename), "ascii")
|
|
295
|
+
if suffix and not filename.endswith(suffix):
|
|
296
|
+
filename += suffix
|
|
297
|
+
else:
|
|
298
|
+
filename = bytes(uuid4().hex, "ascii") + suffix
|
|
299
|
+
if b"content-type" not in headers:
|
|
300
|
+
headers[b"content-type"] = ensure_bytes(
|
|
301
|
+
guess_type(str(filename, "latin-1"))[0] or b"application/octet-stream")
|
|
302
|
+
headers[b"content-disposition"] += b'; filename="%s"' % filename
|
|
303
|
+
yield boundary_line
|
|
304
|
+
for entry in headers.items():
|
|
305
|
+
yield b"%s: %s\r\n" % entry
|
|
306
|
+
yield b"\r\n"
|
|
307
|
+
if isinstance(value, Buffer):
|
|
308
|
+
yield value
|
|
309
|
+
elif isinstance(value, AsyncIterable):
|
|
310
|
+
async for line in value:
|
|
311
|
+
yield line
|
|
312
|
+
else:
|
|
313
|
+
for line in value:
|
|
314
|
+
yield line
|
|
315
|
+
|
|
316
|
+
async def encode_iter() -> AsyncIterator[Buffer]:
|
|
317
|
+
if data:
|
|
318
|
+
for name, value in iter_items(data):
|
|
319
|
+
yield boundary_line
|
|
320
|
+
async for line in encode_item(name, value):
|
|
321
|
+
yield line
|
|
322
|
+
yield b"\r\n"
|
|
323
|
+
if files:
|
|
324
|
+
for name, value in iter_items(files):
|
|
325
|
+
yield boundary_line
|
|
326
|
+
async for line in encode_item(name, value, is_file=True):
|
|
327
|
+
yield line
|
|
328
|
+
yield b"\r\n"
|
|
329
|
+
yield b'--%s--\r\n' % boundary_bytes
|
|
330
|
+
|
|
331
|
+
return {"content-type": "multipart/form-data; boundary="+boundary}, encode_iter()
|
|
332
|
+
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[tool.poetry]
|
|
2
2
|
name = "python-http_request"
|
|
3
|
-
version = "0.0.7"
|
|
3
|
+
version = "0.0.8"
|
|
4
4
|
description = "Python http response utils."
|
|
5
5
|
authors = ["ChenyangGao <wosiwujm@gmail.com>"]
|
|
6
6
|
license = "MIT"
|
|
@@ -27,10 +27,11 @@ include = [
|
|
|
27
27
|
|
|
28
28
|
[tool.poetry.dependencies]
|
|
29
29
|
python = "^3.12"
|
|
30
|
-
python-asynctools = ">=0.
|
|
31
|
-
python-
|
|
32
|
-
python-
|
|
33
|
-
|
|
30
|
+
python-asynctools = ">=0.1.3"
|
|
31
|
+
python-dicttools = ">=0.0.1"
|
|
32
|
+
python-filewrap = ">=0.2.8"
|
|
33
|
+
python-texttools = ">=0.0.4"
|
|
34
|
+
integer_tool = ">=0.0.5"
|
|
34
35
|
|
|
35
36
|
[build-system]
|
|
36
37
|
requires = ["poetry-core"]
|
|
@@ -1,259 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
# encoding: utf-8
|
|
3
|
-
|
|
4
|
-
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
-
__version__ = (0, 0, 7)
|
|
6
|
-
__all__ = [
|
|
7
|
-
"SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict", "headers_str_to_dict",
|
|
8
|
-
"encode_multipart_data", "encode_multipart_data_async",
|
|
9
|
-
]
|
|
10
|
-
|
|
11
|
-
from collections.abc import AsyncIterable, AsyncIterator, Buffer, ItemsView, Iterable, Iterator, Mapping
|
|
12
|
-
from itertools import chain
|
|
13
|
-
from mimetypes import guess_type
|
|
14
|
-
from os import fsdecode
|
|
15
|
-
from os.path import basename
|
|
16
|
-
from re import compile as re_compile, Pattern
|
|
17
|
-
from typing import runtime_checkable, Any, Final, Protocol, TypeVar
|
|
18
|
-
from urllib.parse import quote, urlsplit, urlunsplit
|
|
19
|
-
from uuid import uuid4
|
|
20
|
-
|
|
21
|
-
from asynctools import ensure_aiter, async_chain
|
|
22
|
-
from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
23
|
-
from integer_tool import int_to_bytes
|
|
24
|
-
from texttools import text_to_dict
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
|
|
28
|
-
|
|
29
|
-
CRE_URL_SCHEME: Final = re_compile(r"^(?i:[a-z][a-z0-9.+-]*)://")
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
@runtime_checkable
|
|
33
|
-
class SupportsGeturl(Protocol[AnyStr]):
|
|
34
|
-
def geturl(self) -> AnyStr: ...
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
def url_origin(url: str, /) -> str:
|
|
38
|
-
if url.startswith("://"):
|
|
39
|
-
url = "http" + url
|
|
40
|
-
elif CRE_URL_SCHEME.match(url) is None:
|
|
41
|
-
url = "http://" + url
|
|
42
|
-
urlp = urlsplit(url)
|
|
43
|
-
scheme, netloc = urlp.scheme, urlp.netloc
|
|
44
|
-
if not netloc:
|
|
45
|
-
netloc = "localhost"
|
|
46
|
-
return f"{scheme}://{netloc}"
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
def complete_url(url: str, /) -> str:
|
|
50
|
-
if url.startswith("://"):
|
|
51
|
-
url = "http" + url
|
|
52
|
-
elif CRE_URL_SCHEME.match(url) is None:
|
|
53
|
-
url = "http://" + url
|
|
54
|
-
urlp = urlsplit(url)
|
|
55
|
-
repl = {"query": "", "fragment": ""}
|
|
56
|
-
if not urlp.netloc:
|
|
57
|
-
repl["path"] = "localhost"
|
|
58
|
-
return urlunsplit(urlp._replace(**repl)).rstrip("/")
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
def cookies_str_to_dict(
|
|
62
|
-
cookies: str,
|
|
63
|
-
/,
|
|
64
|
-
kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
|
|
65
|
-
entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
|
|
66
|
-
) -> dict[str, str]:
|
|
67
|
-
return text_to_dict(cookies.strip(), kv_sep, entry_sep)
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
def headers_str_to_dict(
|
|
71
|
-
headers: str,
|
|
72
|
-
/,
|
|
73
|
-
kv_sep: str | Pattern[str] = re_compile(r":\s+"),
|
|
74
|
-
entry_sep: str | Pattern[str] = re_compile("\n+"),
|
|
75
|
-
) -> dict[str, str]:
|
|
76
|
-
return text_to_dict(headers.strip(), kv_sep, entry_sep)
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
def ensure_bytes(s, /) -> bytes:
|
|
80
|
-
if isinstance(s, bytes):
|
|
81
|
-
return s
|
|
82
|
-
elif isinstance(s, memoryview):
|
|
83
|
-
return s.tobytes()
|
|
84
|
-
elif isinstance(s, Buffer):
|
|
85
|
-
return bytes(s)
|
|
86
|
-
if isinstance(s, int):
|
|
87
|
-
return int_to_bytes(s)
|
|
88
|
-
elif isinstance(s, str):
|
|
89
|
-
return bytes(s, "utf-8")
|
|
90
|
-
try:
|
|
91
|
-
return bytes(s)
|
|
92
|
-
except TypeError:
|
|
93
|
-
return bytes(str(s), "utf-8")
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
def encode_multipart_data(
|
|
97
|
-
data: None | Mapping[str, Any] = None,
|
|
98
|
-
files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer]] = None,
|
|
99
|
-
boundary: None | str = None,
|
|
100
|
-
file_suffix: str = "",
|
|
101
|
-
) -> tuple[dict, Iterator[Buffer]]:
|
|
102
|
-
if not boundary:
|
|
103
|
-
boundary = uuid4().hex
|
|
104
|
-
suffix = bytes(file_suffix, "ascii")
|
|
105
|
-
if suffix and not suffix.startswith(b"."):
|
|
106
|
-
suffix = b"." + suffix
|
|
107
|
-
headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
|
|
108
|
-
|
|
109
|
-
def encode_data(data) -> Iterator[Buffer]:
|
|
110
|
-
if not data:
|
|
111
|
-
return
|
|
112
|
-
if isinstance(data, Mapping):
|
|
113
|
-
data = ItemsView(data)
|
|
114
|
-
for name, value in data:
|
|
115
|
-
yield boundary_line
|
|
116
|
-
yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
|
|
117
|
-
yield ensure_bytes(value)
|
|
118
|
-
yield b"\r\n"
|
|
119
|
-
|
|
120
|
-
def encode_files(files) -> Iterator[Buffer]:
|
|
121
|
-
if not files:
|
|
122
|
-
return
|
|
123
|
-
if isinstance(files, Mapping):
|
|
124
|
-
files = ItemsView(files)
|
|
125
|
-
for name, file in files:
|
|
126
|
-
headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
|
|
127
|
-
filename: bytes | str = ""
|
|
128
|
-
if isinstance(file, (list, tuple)):
|
|
129
|
-
match file:
|
|
130
|
-
case [file]:
|
|
131
|
-
pass
|
|
132
|
-
case [file_name, file]:
|
|
133
|
-
pass
|
|
134
|
-
case [file_name, file, file_type]:
|
|
135
|
-
if file_type:
|
|
136
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
137
|
-
case [file_name, file, file_type, file_headers, *rest]:
|
|
138
|
-
if isinstance(file_headers, Mapping):
|
|
139
|
-
file_headers = ItemsView(file_headers)
|
|
140
|
-
for k, v in file_headers:
|
|
141
|
-
headers[ensure_bytes(k).title()] = ensure_bytes(v)
|
|
142
|
-
if file_type:
|
|
143
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
144
|
-
if isinstance(file, Buffer):
|
|
145
|
-
pass
|
|
146
|
-
elif isinstance(file, str):
|
|
147
|
-
file = file.encode("utf-8")
|
|
148
|
-
elif hasattr(file, "read"):
|
|
149
|
-
file = bio_chunk_iter(file)
|
|
150
|
-
if not filename:
|
|
151
|
-
path = getattr(file, "name", None)
|
|
152
|
-
if path:
|
|
153
|
-
filename = basename(path)
|
|
154
|
-
if b"Content-Type" not in headers:
|
|
155
|
-
headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
|
|
156
|
-
if filename:
|
|
157
|
-
name = bytes(quote(filename), "ascii")
|
|
158
|
-
if not name.endswith(suffix):
|
|
159
|
-
name += suffix
|
|
160
|
-
headers[b"Content-Disposition"] += b'; filename="%s"' % name
|
|
161
|
-
else:
|
|
162
|
-
headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
|
|
163
|
-
yield boundary_line
|
|
164
|
-
for entry in headers.items():
|
|
165
|
-
yield b"%s: %s\r\n" % entry
|
|
166
|
-
yield b"\r\n"
|
|
167
|
-
if isinstance(file, Buffer):
|
|
168
|
-
yield file
|
|
169
|
-
else:
|
|
170
|
-
yield from file
|
|
171
|
-
yield b"\r\n"
|
|
172
|
-
|
|
173
|
-
boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
|
|
174
|
-
return headers, chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
def encode_multipart_data_async(
|
|
178
|
-
data: None | Mapping[str, Any] = None,
|
|
179
|
-
files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer] | AsyncIterable[Buffer]] = None,
|
|
180
|
-
boundary: None | str = None,
|
|
181
|
-
file_suffix: str = "",
|
|
182
|
-
) -> tuple[dict, AsyncIterator[Buffer]]:
|
|
183
|
-
if not boundary:
|
|
184
|
-
boundary = uuid4().hex
|
|
185
|
-
suffix = bytes(file_suffix, "ascii")
|
|
186
|
-
if suffix and not suffix.startswith(b"."):
|
|
187
|
-
suffix = b"." + suffix
|
|
188
|
-
headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
|
|
189
|
-
|
|
190
|
-
async def encode_data(data) -> AsyncIterator[Buffer]:
|
|
191
|
-
if not data:
|
|
192
|
-
return
|
|
193
|
-
if isinstance(data, Mapping):
|
|
194
|
-
data = ItemsView(data)
|
|
195
|
-
for name, value in data:
|
|
196
|
-
yield boundary_line
|
|
197
|
-
yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
|
|
198
|
-
yield ensure_bytes(value)
|
|
199
|
-
yield b"\r\n"
|
|
200
|
-
|
|
201
|
-
async def encode_files(files) -> AsyncIterator[Buffer]:
|
|
202
|
-
if not files:
|
|
203
|
-
return
|
|
204
|
-
if isinstance(files, Mapping):
|
|
205
|
-
files = ItemsView(files)
|
|
206
|
-
for name, file in files:
|
|
207
|
-
headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
|
|
208
|
-
filename: bytes | str = ""
|
|
209
|
-
if isinstance(file, (list, tuple)):
|
|
210
|
-
match file:
|
|
211
|
-
case [file]:
|
|
212
|
-
pass
|
|
213
|
-
case [file_name, file]:
|
|
214
|
-
pass
|
|
215
|
-
case [file_name, file, file_type]:
|
|
216
|
-
if file_type:
|
|
217
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
218
|
-
case [file_name, file, file_type, file_headers, *rest]:
|
|
219
|
-
if isinstance(file_headers, Mapping):
|
|
220
|
-
file_headers = ItemsView(file_headers)
|
|
221
|
-
for k, v in file_headers:
|
|
222
|
-
headers[ensure_bytes(k).title()] = ensure_bytes(v)
|
|
223
|
-
if file_type:
|
|
224
|
-
headers[b"Content-Type"] = ensure_bytes(file_type)
|
|
225
|
-
if isinstance(file, Buffer):
|
|
226
|
-
pass
|
|
227
|
-
elif isinstance(file, str):
|
|
228
|
-
file = file.encode("utf-8")
|
|
229
|
-
elif hasattr(file, "read"):
|
|
230
|
-
file = bio_chunk_async_iter(file)
|
|
231
|
-
if not filename:
|
|
232
|
-
path = getattr(file, "name", None)
|
|
233
|
-
if path:
|
|
234
|
-
filename = basename(path)
|
|
235
|
-
if b"Content-Type" not in headers:
|
|
236
|
-
headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
|
|
237
|
-
else:
|
|
238
|
-
file = ensure_aiter(file)
|
|
239
|
-
if filename:
|
|
240
|
-
name = bytes(quote(filename), "ascii")
|
|
241
|
-
if not name.endswith(suffix):
|
|
242
|
-
name += suffix
|
|
243
|
-
headers[b"Content-Disposition"] += b'; filename="%s"' % name
|
|
244
|
-
else:
|
|
245
|
-
headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
|
|
246
|
-
yield boundary_line
|
|
247
|
-
for entry in headers.items():
|
|
248
|
-
yield b"%s: %s\r\n" % entry
|
|
249
|
-
yield b"\r\n"
|
|
250
|
-
if isinstance(file, Buffer):
|
|
251
|
-
yield file
|
|
252
|
-
else:
|
|
253
|
-
async for chunk in file:
|
|
254
|
-
yield chunk
|
|
255
|
-
yield b"\r\n"
|
|
256
|
-
|
|
257
|
-
boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
|
|
258
|
-
return headers, async_chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
|
|
259
|
-
|
|
File without changes
|
|
File without changes
|
|
File without changes
|