python-http_request 0.0.7.1__tar.gz → 0.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: python-http_request
3
- Version: 0.0.7.1
3
+ Version: 0.0.8
4
4
  Summary: Python http response utils.
5
5
  Home-page: https://github.com/ChenyangGao/web-mount-packs/tree/main/python-module/python-http_request
6
6
  License: MIT
@@ -20,10 +20,11 @@ Classifier: Programming Language :: Python :: 3 :: Only
20
20
  Classifier: Topic :: Software Development
21
21
  Classifier: Topic :: Software Development :: Libraries
22
22
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
23
- Requires-Dist: integer_tool (>=0.0.2)
24
- Requires-Dist: python-asynctools (>=0.0.10)
25
- Requires-Dist: python-filewrap (>=0.2.6)
26
- Requires-Dist: python-texttools (>=0.0.3)
23
+ Requires-Dist: integer_tool (>=0.0.5)
24
+ Requires-Dist: python-asynctools (>=0.1.3)
25
+ Requires-Dist: python-dicttools (>=0.0.1)
26
+ Requires-Dist: python-filewrap (>=0.2.8)
27
+ Requires-Dist: python-texttools (>=0.0.4)
27
28
  Project-URL: Repository, https://github.com/ChenyangGao/web-mount-packs/tree/main/python-module/python-http_request
28
29
  Description-Content-Type: text/markdown
29
30
 
@@ -0,0 +1,332 @@
1
+ #!/usr/bin/env python3
2
+ # encoding: utf-8
3
+
4
+ __author__ = "ChenyangGao <https://chenyanggao.github.io>"
5
+ __version__ = (0, 0, 8)
6
+ __all__ = [
7
+ "SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict",
8
+ "headers_str_to_dict_by_lines", "headers_str_to_dict",
9
+ "encode_multipart_data", "encode_multipart_data_async",
10
+ ]
11
+
12
+ from collections import UserString
13
+ from collections.abc import (
14
+ AsyncIterable, AsyncIterator, Buffer, Iterable, Iterator, Mapping,
15
+ )
16
+ from io import TextIOWrapper
17
+ from itertools import batched
18
+ from mimetypes import guess_type
19
+ from os import PathLike
20
+ from os.path import basename
21
+ from re import compile as re_compile, Pattern
22
+ from typing import runtime_checkable, Any, Final, Protocol, TypeVar
23
+ from urllib.parse import quote, urlsplit, urlunsplit
24
+ from uuid import uuid4
25
+
26
+ from asynctools import async_map
27
+ from dicttools import iter_items
28
+ from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
29
+ from integer_tool import int_to_bytes
30
+ from texttools import text_to_dict
31
+
32
+
33
+ AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
34
+
35
+ CRE_URL_SCHEME_match: Final = re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match
36
+
37
+
38
@runtime_checkable
class SupportsGeturl(Protocol[AnyStr]):
    """Structural protocol for objects exposing a ``geturl()`` method.

    Matches e.g. the result of ``urllib.parse.urlsplit`` or a
    ``urllib.request`` response object; usable with ``isinstance`` thanks
    to ``@runtime_checkable``.
    """
    def geturl(self) -> AnyStr: ...
41
+
42
+
43
def url_origin(url: str, /, default_port: int = 0) -> str:
    """Return the origin (``scheme://netloc``) of *url*.

    Incomplete inputs are normalized first: a missing scheme becomes
    ``http``, a bare path is anchored at ``localhost``, and protocol-relative
    (``//host``) or scheme-less (``://host``) prefixes are completed.

    :param url: the (possibly partial) URL.
    :param default_port: if nonzero and *url* carries no explicit port,
        this port is appended to the netloc.
    :return: ``"scheme://netloc"`` with no path, query or fragment.
    """
    # NOTE: ``//`` must be tested before ``/`` — every protocol-relative URL
    # also starts with a single slash, so testing ``/`` first would wrongly
    # anchor ``//host/...`` at localhost (the bug in the previous version).
    # The pattern is recompiled per call, but ``re`` caches compiled patterns.
    if not re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match(url):
        if url.startswith("//"):
            url = "http:" + url
        elif url.startswith("://"):
            url = "http" + url
        elif url.startswith("/"):
            url = "http://localhost" + url
        else:
            url = "http://" + url
    urlp = urlsplit(url)
    scheme, netloc = urlp.scheme or "http", urlp.netloc or "localhost"
    if default_port and not urlp.port:
        # strip a dangling ":" (host with empty port) before appending
        netloc = netloc.removesuffix(":") + f":{default_port}"
    return f"{scheme}://{netloc}"
57
+
58
+
59
def complete_url(url: str, /, default_port: int = 0) -> str:
    """Normalize *url* to a full absolute URL without query or fragment.

    Incomplete inputs get an ``http`` scheme and/or ``localhost`` host; the
    query and fragment are dropped and any trailing ``/`` is stripped.

    :param url: the (possibly partial) URL.
    :param default_port: if nonzero and *url* carries no explicit port,
        this port is appended to the netloc.
    :return: the completed URL string.
    """
    # NOTE: ``//`` must be tested before ``/`` — every protocol-relative URL
    # also starts with a single slash, so testing ``/`` first would wrongly
    # anchor ``//host/...`` at localhost (the bug in the previous version).
    # The pattern is recompiled per call, but ``re`` caches compiled patterns.
    if not re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match(url):
        if url.startswith("//"):
            url = "http:" + url
        elif url.startswith("://"):
            url = "http" + url
        elif url.startswith("/"):
            url = "http://localhost" + url
        else:
            url = "http://" + url
    urlp = urlsplit(url)
    repl = {"query": "", "fragment": ""}
    if not urlp.scheme:
        repl["scheme"] = "http"
    netloc = urlp.netloc or "localhost"
    if default_port and not urlp.port:
        # strip a dangling ":" (host with empty port) before appending
        netloc = netloc.removesuffix(":") + f":{default_port}"
    repl["netloc"] = netloc
    return urlunsplit(urlp._replace(**repl)).rstrip("/")
79
+
80
+
81
def cookies_str_to_dict(
    cookies: str,
    /,
    kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
    entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
) -> dict[str, str]:
    """Parse a ``Cookie`` header string into a ``{name: value}`` dict.

    Splitting is delegated to ``text_to_dict``: entries are separated by
    *entry_sep* (``;`` by default) and each entry is split on *kv_sep*
    (``=`` by default); the default patterns absorb surrounding whitespace.
    """
    trimmed = cookies.strip()
    return text_to_dict(trimmed, kv_sep, entry_sep)
88
+
89
+
90
def headers_str_to_dict(
    headers: str,
    /,
    kv_sep: str | Pattern[str] = re_compile(r":\s+"),
    entry_sep: str | Pattern[str] = re_compile("\n+"),
) -> dict[str, str]:
    """Parse raw HTTP header text into a ``{name: value}`` dict.

    Splitting is delegated to ``text_to_dict``: one header per line
    (*entry_sep*), each split on the first ``": "`` (*kv_sep*).
    """
    trimmed = headers.strip()
    return text_to_dict(trimmed, kv_sep, entry_sep)
97
+
98
+
99
+ def headers_str_to_dict_by_lines(headers: str, /, ) -> dict[str, str]:
100
+ lines = headers.strip().split("\n")
101
+ if len(lines) & 1:
102
+ lines.append("")
103
+ return dict(batched(lines, 2)) # type: ignore
104
+
105
+
106
+ def ensure_bytes(
107
+ o,
108
+ /,
109
+ encoding: str = "utf-8",
110
+ errors: str = "strict",
111
+ ) -> bytes:
112
+ if isinstance(o, bytes):
113
+ return o
114
+ elif isinstance(o, memoryview):
115
+ return o.tobytes()
116
+ elif isinstance(o, Buffer):
117
+ return bytes(o)
118
+ elif isinstance(o, int):
119
+ return int_to_bytes(o)
120
+ elif isinstance(o, (str, UserString)):
121
+ return o.encode(encoding, errors)
122
+ try:
123
+ return bytes(o)
124
+ except TypeError:
125
+ return bytes(str(o), encoding, errors)
126
+
127
+
128
+ def ensure_buffer(
129
+ o,
130
+ /,
131
+ encoding: str = "utf-8",
132
+ errors: str = "strict",
133
+ ) -> Buffer:
134
+ if isinstance(o, Buffer):
135
+ return o
136
+ elif isinstance(o, int):
137
+ return int_to_bytes(o)
138
+ elif isinstance(o, (str, UserString)):
139
+ return o.encode(encoding, errors)
140
+ try:
141
+ return bytes(o)
142
+ except TypeError:
143
+ return bytes(str(o), encoding, errors)
144
+
145
+
146
+ def encode_multipart_data(
147
+ data: None | Mapping[Buffer | str, Any] = None,
148
+ files: None | Mapping[Buffer | str, Any] = None,
149
+ boundary: None | str = None,
150
+ file_suffix: str = "",
151
+ ) -> tuple[dict, Iterator[Buffer]]:
152
+ if not boundary:
153
+ boundary = uuid4().hex
154
+ boundary_bytes = bytes(boundary, "ascii")
155
+ elif isinstance(boundary, str):
156
+ boundary_bytes = bytes(boundary, "latin-1")
157
+ else:
158
+ boundary_bytes = bytes(boundary)
159
+ boundary = str(boundary_bytes, "latin-1")
160
+ boundary_line = b"--%s\r\n" % boundary_bytes
161
+ suffix = ensure_bytes(file_suffix)
162
+ if suffix and not suffix.startswith(b"."):
163
+ suffix = b"." + suffix
164
+
165
+ def encode_item(name, value, /, is_file=False) -> Iterator[Buffer]:
166
+ headers = {b"content-disposition": b'form-data; name="%s"' % bytes(quote(name), "ascii")}
167
+ filename = b""
168
+ if isinstance(value, (list, tuple)):
169
+ match value:
170
+ case [value]:
171
+ pass
172
+ case [_, value]:
173
+ pass
174
+ case [_, value, file_type]:
175
+ if file_type:
176
+ headers[b"content-type"] = ensure_bytes(file_type)
177
+ case [_, value, file_type, file_headers, *rest]:
178
+ for k, v in iter_items(file_headers):
179
+ headers[ensure_bytes(k).lower()] = ensure_bytes(v)
180
+ if file_type:
181
+ headers[b"content-type"] = ensure_bytes(file_type)
182
+ if isinstance(value, (PathLike, SupportsRead)):
183
+ is_file = True
184
+ if isinstance(value, PathLike):
185
+ file: SupportsRead[Buffer] = open(value, "rb")
186
+ elif isinstance(value, TextIOWrapper):
187
+ file = value.buffer
188
+ else:
189
+ file = value
190
+ value = bio_chunk_iter(file)
191
+ if not filename:
192
+ filename = ensure_bytes(basename(getattr(file, "name", b"") or b""))
193
+ elif isinstance(value, Buffer):
194
+ pass
195
+ elif isinstance(value, (str, UserString)):
196
+ value = ensure_bytes(value)
197
+ elif isinstance(value, Iterable):
198
+ value = map(ensure_buffer, value)
199
+ else:
200
+ value = ensure_buffer(value)
201
+ if is_file:
202
+ if filename:
203
+ filename = bytes(quote(filename), "ascii")
204
+ if suffix and not filename.endswith(suffix):
205
+ filename += suffix
206
+ else:
207
+ filename = bytes(uuid4().hex, "ascii") + suffix
208
+ if b"content-type" not in headers:
209
+ headers[b"content-type"] = ensure_bytes(
210
+ guess_type(str(filename, "latin-1"))[0] or b"application/octet-stream")
211
+ headers[b"content-disposition"] += b'; filename="%s"' % filename
212
+ yield boundary_line
213
+ for entry in headers.items():
214
+ yield b"%s: %s\r\n" % entry
215
+ yield b"\r\n"
216
+ if isinstance(value, Buffer):
217
+ yield value
218
+ else:
219
+ yield from value
220
+
221
+ def encode_iter() -> Iterator[Buffer]:
222
+ if data:
223
+ for name, value in iter_items(data):
224
+ yield boundary_line
225
+ yield from encode_item(name, value)
226
+ yield b"\r\n"
227
+ if files:
228
+ for name, value in iter_items(files):
229
+ yield boundary_line
230
+ yield from encode_item(name, value, is_file=True)
231
+ yield b"\r\n"
232
+ yield b'--%s--\r\n' % boundary_bytes
233
+
234
+ return {"content-type": "multipart/form-data; boundary="+boundary}, encode_iter()
235
+
236
+
237
+ def encode_multipart_data_async(
238
+ data: None | Mapping[Buffer | str, Any] = None,
239
+ files: None | Mapping[Buffer | str, Any] = None,
240
+ boundary: None | str = None,
241
+ file_suffix: str = "",
242
+ ) -> tuple[dict, AsyncIterator[Buffer]]:
243
+ if not boundary:
244
+ boundary = uuid4().hex
245
+ boundary_bytes = bytes(boundary, "ascii")
246
+ elif isinstance(boundary, str):
247
+ boundary_bytes = bytes(boundary, "latin-1")
248
+ else:
249
+ boundary_bytes = bytes(boundary)
250
+ boundary = str(boundary_bytes, "latin-1")
251
+ boundary_line = b"--%s\r\n" % boundary_bytes
252
+ suffix = ensure_bytes(file_suffix)
253
+ if suffix and not suffix.startswith(b"."):
254
+ suffix = b"." + suffix
255
+
256
+ async def encode_item(name, value, /, is_file=False) -> AsyncIterator[Buffer]:
257
+ headers = {b"content-disposition": b'form-data; name="%s"' % bytes(quote(name), "ascii")}
258
+ filename = b""
259
+ if isinstance(value, (list, tuple)):
260
+ match value:
261
+ case [value]:
262
+ pass
263
+ case [_, value]:
264
+ pass
265
+ case [_, value, file_type]:
266
+ if file_type:
267
+ headers[b"content-type"] = ensure_bytes(file_type)
268
+ case [_, value, file_type, file_headers, *rest]:
269
+ for k, v in iter_items(file_headers):
270
+ headers[ensure_bytes(k).lower()] = ensure_bytes(v)
271
+ if file_type:
272
+ headers[b"content-type"] = ensure_bytes(file_type)
273
+ if isinstance(value, (PathLike, SupportsRead)):
274
+ is_file = True
275
+ if isinstance(value, PathLike):
276
+ file: SupportsRead[Buffer] = open(value, "rb")
277
+ elif isinstance(value, TextIOWrapper):
278
+ file = value.buffer
279
+ else:
280
+ file = value
281
+ value = bio_chunk_async_iter(file)
282
+ if not filename:
283
+ filename = ensure_bytes(basename(getattr(file, "name", b"") or b""))
284
+ elif isinstance(value, Buffer):
285
+ pass
286
+ elif isinstance(value, (str, UserString)):
287
+ value = ensure_bytes(value)
288
+ elif isinstance(value, Iterable):
289
+ value = async_map(ensure_buffer, value)
290
+ else:
291
+ value = ensure_buffer(value)
292
+ if is_file:
293
+ if filename:
294
+ filename = bytes(quote(filename), "ascii")
295
+ if suffix and not filename.endswith(suffix):
296
+ filename += suffix
297
+ else:
298
+ filename = bytes(uuid4().hex, "ascii") + suffix
299
+ if b"content-type" not in headers:
300
+ headers[b"content-type"] = ensure_bytes(
301
+ guess_type(str(filename, "latin-1"))[0] or b"application/octet-stream")
302
+ headers[b"content-disposition"] += b'; filename="%s"' % filename
303
+ yield boundary_line
304
+ for entry in headers.items():
305
+ yield b"%s: %s\r\n" % entry
306
+ yield b"\r\n"
307
+ if isinstance(value, Buffer):
308
+ yield value
309
+ elif isinstance(value, AsyncIterable):
310
+ async for line in value:
311
+ yield line
312
+ else:
313
+ for line in value:
314
+ yield line
315
+
316
+ async def encode_iter() -> AsyncIterator[Buffer]:
317
+ if data:
318
+ for name, value in iter_items(data):
319
+ yield boundary_line
320
+ async for line in encode_item(name, value):
321
+ yield line
322
+ yield b"\r\n"
323
+ if files:
324
+ for name, value in iter_items(files):
325
+ yield boundary_line
326
+ async for line in encode_item(name, value, is_file=True):
327
+ yield line
328
+ yield b"\r\n"
329
+ yield b'--%s--\r\n' % boundary_bytes
330
+
331
+ return {"content-type": "multipart/form-data; boundary="+boundary}, encode_iter()
332
+
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "python-http_request"
3
- version = "0.0.7.1"
3
+ version = "0.0.8"
4
4
  description = "Python http response utils."
5
5
  authors = ["ChenyangGao <wosiwujm@gmail.com>"]
6
6
  license = "MIT"
@@ -27,10 +27,11 @@ include = [
27
27
 
28
28
  [tool.poetry.dependencies]
29
29
  python = "^3.12"
30
- python-asynctools = ">=0.0.10"
31
- python-filewrap = ">=0.2.6"
32
- python-texttools = ">=0.0.3"
33
- integer_tool = ">=0.0.2"
30
+ python-asynctools = ">=0.1.3"
31
+ python-dicttools = ">=0.0.1"
32
+ python-filewrap = ">=0.2.8"
33
+ python-texttools = ">=0.0.4"
34
+ integer_tool = ">=0.0.5"
34
35
 
35
36
  [build-system]
36
37
  requires = ["poetry-core"]
@@ -1,273 +0,0 @@
1
- #!/usr/bin/env python3
2
- # encoding: utf-8
3
-
4
- __author__ = "ChenyangGao <https://chenyanggao.github.io>"
5
- __version__ = (0, 0, 7)
6
- __all__ = [
7
- "SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict", "headers_str_to_dict",
8
- "encode_multipart_data", "encode_multipart_data_async",
9
- ]
10
-
11
- from collections.abc import AsyncIterable, AsyncIterator, Buffer, ItemsView, Iterable, Iterator, Mapping
12
- from itertools import chain
13
- from mimetypes import guess_type
14
- from os import fsdecode
15
- from os.path import basename
16
- from re import compile as re_compile, Pattern
17
- from typing import runtime_checkable, Any, Final, Protocol, TypeVar
18
- from urllib.parse import quote, urlsplit, urlunsplit
19
- from uuid import uuid4
20
-
21
- from asynctools import ensure_aiter, async_chain
22
- from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
23
- from integer_tool import int_to_bytes
24
- from texttools import text_to_dict
25
-
26
-
27
- AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
28
-
29
- CRE_URL_SCHEME_match: Final = re_compile(r"(?i:[a-z][a-z0-9.+-]*)://").match
30
-
31
-
32
- @runtime_checkable
33
- class SupportsGeturl(Protocol[AnyStr]):
34
- def geturl(self) -> AnyStr: ...
35
-
36
-
37
- def url_origin(url: str, /, default_port: int = 0) -> str:
38
- if url.startswith("/"):
39
- url = "http://localhost" + url
40
- elif url.startswith("//"):
41
- url = "http:" + url
42
- elif url.startswith("://"):
43
- url = "http" + url
44
- elif not CRE_URL_SCHEME_match(url):
45
- url = "http://" + url
46
- urlp = urlsplit(url)
47
- scheme, netloc = urlp.scheme or "http", urlp.netloc or "localhost"
48
- if default_port and not urlp.port:
49
- netloc = netloc.removesuffix(":") + f":{default_port}"
50
- return f"{scheme}://{netloc}"
51
-
52
-
53
- def complete_url(url: str, /, default_port: int = 0) -> str:
54
- if url.startswith("/"):
55
- url = "http://localhost" + url
56
- elif url.startswith("//"):
57
- url = "http:" + url
58
- elif url.startswith("://"):
59
- url = "http" + url
60
- elif not CRE_URL_SCHEME_match(url):
61
- url = "http://" + url
62
- urlp = urlsplit(url)
63
- repl = {"query": "", "fragment": ""}
64
- if not urlp.scheme:
65
- repl["scheme"] = "http"
66
- netloc = urlp.netloc
67
- if not netloc:
68
- netloc = "localhost"
69
- if default_port and not urlp.port:
70
- netloc = netloc.removesuffix(":") + f":{default_port}"
71
- repl["netloc"] = netloc
72
- return urlunsplit(urlp._replace(**repl)).rstrip("/")
73
-
74
-
75
- def cookies_str_to_dict(
76
- cookies: str,
77
- /,
78
- kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
79
- entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
80
- ) -> dict[str, str]:
81
- return text_to_dict(cookies.strip(), kv_sep, entry_sep)
82
-
83
-
84
- def headers_str_to_dict(
85
- headers: str,
86
- /,
87
- kv_sep: str | Pattern[str] = re_compile(r":\s+"),
88
- entry_sep: str | Pattern[str] = re_compile("\n+"),
89
- ) -> dict[str, str]:
90
- return text_to_dict(headers.strip(), kv_sep, entry_sep)
91
-
92
-
93
- def ensure_bytes(s, /) -> bytes:
94
- if isinstance(s, bytes):
95
- return s
96
- elif isinstance(s, memoryview):
97
- return s.tobytes()
98
- elif isinstance(s, Buffer):
99
- return bytes(s)
100
- if isinstance(s, int):
101
- return int_to_bytes(s)
102
- elif isinstance(s, str):
103
- return bytes(s, "utf-8")
104
- try:
105
- return bytes(s)
106
- except TypeError:
107
- return bytes(str(s), "utf-8")
108
-
109
-
110
- def encode_multipart_data(
111
- data: None | Mapping[str, Any] = None,
112
- files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer]] = None,
113
- boundary: None | str = None,
114
- file_suffix: str = "",
115
- ) -> tuple[dict, Iterator[Buffer]]:
116
- if not boundary:
117
- boundary = uuid4().hex
118
- suffix = bytes(file_suffix, "ascii")
119
- if suffix and not suffix.startswith(b"."):
120
- suffix = b"." + suffix
121
- headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
122
-
123
- def encode_data(data) -> Iterator[Buffer]:
124
- if not data:
125
- return
126
- if isinstance(data, Mapping):
127
- data = ItemsView(data)
128
- for name, value in data:
129
- yield boundary_line
130
- yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
131
- yield ensure_bytes(value)
132
- yield b"\r\n"
133
-
134
- def encode_files(files) -> Iterator[Buffer]:
135
- if not files:
136
- return
137
- if isinstance(files, Mapping):
138
- files = ItemsView(files)
139
- for name, file in files:
140
- headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
141
- filename: bytes | str = ""
142
- if isinstance(file, (list, tuple)):
143
- match file:
144
- case [file]:
145
- pass
146
- case [file_name, file]:
147
- pass
148
- case [file_name, file, file_type]:
149
- if file_type:
150
- headers[b"Content-Type"] = ensure_bytes(file_type)
151
- case [file_name, file, file_type, file_headers, *rest]:
152
- if isinstance(file_headers, Mapping):
153
- file_headers = ItemsView(file_headers)
154
- for k, v in file_headers:
155
- headers[ensure_bytes(k).title()] = ensure_bytes(v)
156
- if file_type:
157
- headers[b"Content-Type"] = ensure_bytes(file_type)
158
- if isinstance(file, Buffer):
159
- pass
160
- elif isinstance(file, str):
161
- file = file.encode("utf-8")
162
- elif hasattr(file, "read"):
163
- file = bio_chunk_iter(file)
164
- if not filename:
165
- path = getattr(file, "name", None)
166
- if path:
167
- filename = basename(path)
168
- if b"Content-Type" not in headers:
169
- headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
170
- if filename:
171
- name = bytes(quote(filename), "ascii")
172
- if not name.endswith(suffix):
173
- name += suffix
174
- headers[b"Content-Disposition"] += b'; filename="%s"' % name
175
- else:
176
- headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
177
- yield boundary_line
178
- for entry in headers.items():
179
- yield b"%s: %s\r\n" % entry
180
- yield b"\r\n"
181
- if isinstance(file, Buffer):
182
- yield file
183
- else:
184
- yield from file
185
- yield b"\r\n"
186
-
187
- boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
188
- return headers, chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
189
-
190
-
191
- def encode_multipart_data_async(
192
- data: None | Mapping[str, Any] = None,
193
- files: None | Mapping[str, Buffer | SupportsRead[Buffer] | Iterable[Buffer] | AsyncIterable[Buffer]] = None,
194
- boundary: None | str = None,
195
- file_suffix: str = "",
196
- ) -> tuple[dict, AsyncIterator[Buffer]]:
197
- if not boundary:
198
- boundary = uuid4().hex
199
- suffix = bytes(file_suffix, "ascii")
200
- if suffix and not suffix.startswith(b"."):
201
- suffix = b"." + suffix
202
- headers = {"Content-Type": f"multipart/form-data; boundary={boundary}"}
203
-
204
- async def encode_data(data) -> AsyncIterator[Buffer]:
205
- if not data:
206
- return
207
- if isinstance(data, Mapping):
208
- data = ItemsView(data)
209
- for name, value in data:
210
- yield boundary_line
211
- yield b'Content-Disposition: form-data; name="%s"\r\n\r\n' % bytes(quote(name), "ascii")
212
- yield ensure_bytes(value)
213
- yield b"\r\n"
214
-
215
- async def encode_files(files) -> AsyncIterator[Buffer]:
216
- if not files:
217
- return
218
- if isinstance(files, Mapping):
219
- files = ItemsView(files)
220
- for name, file in files:
221
- headers: dict[bytes, bytes] = {b"Content-Disposition": b'form-data; name="%s"' % quote(name).encode("ascii")}
222
- filename: bytes | str = ""
223
- if isinstance(file, (list, tuple)):
224
- match file:
225
- case [file]:
226
- pass
227
- case [file_name, file]:
228
- pass
229
- case [file_name, file, file_type]:
230
- if file_type:
231
- headers[b"Content-Type"] = ensure_bytes(file_type)
232
- case [file_name, file, file_type, file_headers, *rest]:
233
- if isinstance(file_headers, Mapping):
234
- file_headers = ItemsView(file_headers)
235
- for k, v in file_headers:
236
- headers[ensure_bytes(k).title()] = ensure_bytes(v)
237
- if file_type:
238
- headers[b"Content-Type"] = ensure_bytes(file_type)
239
- if isinstance(file, Buffer):
240
- pass
241
- elif isinstance(file, str):
242
- file = file.encode("utf-8")
243
- elif hasattr(file, "read"):
244
- file = bio_chunk_async_iter(file)
245
- if not filename:
246
- path = getattr(file, "name", None)
247
- if path:
248
- filename = basename(path)
249
- if b"Content-Type" not in headers:
250
- headers[b"Content-Type"] = ensure_bytes(guess_type(fsdecode(filename))[0] or b"application/octet-stream")
251
- else:
252
- file = ensure_aiter(file)
253
- if filename:
254
- name = bytes(quote(filename), "ascii")
255
- if not name.endswith(suffix):
256
- name += suffix
257
- headers[b"Content-Disposition"] += b'; filename="%s"' % name
258
- else:
259
- headers[b"Content-Disposition"] += b'; filename="%032x%s"' % (uuid4().int, suffix)
260
- yield boundary_line
261
- for entry in headers.items():
262
- yield b"%s: %s\r\n" % entry
263
- yield b"\r\n"
264
- if isinstance(file, Buffer):
265
- yield file
266
- else:
267
- async for chunk in file:
268
- yield chunk
269
- yield b"\r\n"
270
-
271
- boundary_line = b"--%s\r\n" % boundary.encode("utf-8")
272
- return headers, async_chain(encode_data(data), encode_files(files), (b'--%s--\r\n' % boundary.encode("ascii"),))
273
-