python-http_request 0.0.2__tar.gz → 0.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {python_http_request-0.0.2 → python_http_request-0.0.4}/PKG-INFO +1 -1
- {python_http_request-0.0.2 → python_http_request-0.0.4}/http_request/__init__.py +54 -4
- {python_http_request-0.0.2 → python_http_request-0.0.4}/pyproject.toml +1 -1
- {python_http_request-0.0.2 → python_http_request-0.0.4}/LICENSE +0 -0
- {python_http_request-0.0.2 → python_http_request-0.0.4}/http_request/py.typed +0 -0
- {python_http_request-0.0.2 → python_http_request-0.0.4}/readme.md +0 -0
|
@@ -2,27 +2,77 @@
|
|
|
2
2
|
# encoding: utf-8
|
|
3
3
|
|
|
4
4
|
__author__ = "ChenyangGao <https://chenyanggao.github.io>"
|
|
5
|
-
__version__ = (0, 0,
|
|
6
|
-
__all__ = [
|
|
5
|
+
__version__ = (0, 0, 4)
|
|
6
|
+
__all__ = [
|
|
7
|
+
"SupportsGeturl", "url_origin", "complete_url", "cookies_str_to_dict", "headers_str_to_dict",
|
|
8
|
+
"encode_multipart_data", "encode_multipart_data_async",
|
|
9
|
+
]
|
|
7
10
|
|
|
8
11
|
from itertools import chain
|
|
9
12
|
from collections.abc import AsyncIterable, AsyncIterator, ItemsView, Iterable, Iterator, Mapping
|
|
10
|
-
from
|
|
11
|
-
from
|
|
13
|
+
from re import compile as re_compile, Pattern
|
|
14
|
+
from typing import runtime_checkable, Any, Final, Protocol, TypeVar
|
|
15
|
+
from urllib.parse import quote, urlsplit, urlunsplit
|
|
12
16
|
from uuid import uuid4
|
|
13
17
|
|
|
14
18
|
from asynctools import ensure_aiter, async_chain
|
|
15
19
|
from filewrap import bio_chunk_iter, bio_chunk_async_iter, SupportsRead
|
|
16
20
|
from integer_tool import int_to_bytes
|
|
21
|
+
from texttools import text_to_dict
|
|
17
22
|
|
|
18
23
|
|
|
19
24
|
AnyStr = TypeVar("AnyStr", bytes, str, covariant=True)
|
|
20
25
|
|
|
26
|
+
CRE_URL_SCHEME: Final = re_compile(r"^(?i:[a-z][a-z0-9.+-]*)://")
|
|
21
27
|
|
|
28
|
+
|
|
29
|
+
@runtime_checkable
class SupportsGeturl(Protocol[AnyStr]):
    """Structural type for any object exposing a ``geturl()`` method.

    Runtime-checkable, so ``isinstance(obj, SupportsGeturl)`` works; the
    covariant ``AnyStr`` parameter allows the URL to be ``str`` or ``bytes``.
    """
    def geturl(self) -> AnyStr: ...
|
|
24
32
|
|
|
25
33
|
|
|
34
|
+
def url_origin(url: str, /) -> str:
    """Return the origin (``scheme://netloc``) of *url*.

    A URL without a scheme is assumed to be ``http``; an empty network
    location falls back to ``localhost``.

    :param url: the URL to reduce to its origin.
    :return: the ``scheme://netloc`` prefix of the URL.
    """
    # Default the scheme to http when it is missing or empty.
    if url.startswith("://"):
        url = "http" + url
    elif CRE_URL_SCHEME.match(url) is None:
        url = "http://" + url
    parts = urlsplit(url)
    # An empty host collapses to the local machine.
    host = parts.netloc or "localhost"
    return f"{parts.scheme}://{host}"
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def complete_url(url: str, /) -> str:
    """Normalize *url*: ensure a scheme, drop query and fragment, and
    strip any trailing slashes.

    A URL without a scheme is assumed to be ``http``; when the network
    location is empty the path is replaced by ``localhost``.

    :param url: the URL to normalize.
    :return: the normalized URL string.
    """
    # Default the scheme to http when it is missing or empty.
    if url.startswith("://"):
        url = "http" + url
    elif CRE_URL_SCHEME.match(url) is None:
        url = "http://" + url
    # Discard the query and fragment components up front.
    parts = urlsplit(url)._replace(query="", fragment="")
    if not parts.netloc:
        # NOTE(review): this overwrites the path with "localhost" when the
        # host is empty — presumably intentional for inputs like "http:///x";
        # confirm against callers before changing.
        parts = parts._replace(path="localhost")
    return urlunsplit(parts).rstrip("/")
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def cookies_str_to_dict(
    cookies: str,
    /,
    kv_sep: str | Pattern[str] = re_compile(r"\s*=\s*"),
    entry_sep: str | Pattern[str] = re_compile(r"\s*;\s*"),
) -> dict[str, str]:
    """Parse a ``Cookie`` header string into a name → value mapping.

    :param cookies: raw cookie string, e.g. ``"a=1; b=2"``.
    :param kv_sep: separator between a cookie name and its value
        (string or compiled pattern).
    :param entry_sep: separator between cookie entries
        (string or compiled pattern).
    :return: dict mapping cookie names to values.
    """
    trimmed = cookies.strip()
    return text_to_dict(trimmed, kv_sep, entry_sep)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def headers_str_to_dict(
    headers: str,
    /,
    kv_sep: str | Pattern[str] = re_compile(r":\s+"),
    entry_sep: str | Pattern[str] = re_compile("\n+"),
) -> dict[str, str]:
    """Parse a raw HTTP header block into a name → value mapping.

    :param headers: raw header text, one ``Name: value`` pair per line.
    :param kv_sep: separator between a header name and its value
        (string or compiled pattern).
    :param entry_sep: separator between header lines
        (string or compiled pattern).
    :return: dict mapping header names to values.
    """
    trimmed = headers.strip()
    return text_to_dict(trimmed, kv_sep, entry_sep)
|
|
74
|
+
|
|
75
|
+
|
|
26
76
|
def ensure_bytes(s, /) -> bytes | bytearray | memoryview:
|
|
27
77
|
if isinstance(s, (bytes, bytearray, memoryview)):
|
|
28
78
|
return s
|
|
File without changes
|
|
File without changes
|
|
File without changes
|