catocli 1.0.4__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of catocli might be problematic; see the registry's advisory page for more details.

Files changed (92) hide show
  1. build/lib/catocli/Utils/clidriver.py +103 -0
  2. build/lib/catocli/__init__.py +2 -0
  3. build/lib/catocli/__main__.py +12 -0
  4. build/lib/catocli/parsers/custom/__init__.py +42 -0
  5. build/lib/catocli/parsers/custom/customLib.py +71 -0
  6. build/lib/catocli/parsers/mutation_admin/__init__.py +51 -0
  7. build/lib/catocli/parsers/mutation_container/__init__.py +23 -0
  8. build/lib/catocli/parsers/mutation_policy/__init__.py +357 -0
  9. build/lib/catocli/parsers/mutation_site/__init__.py +219 -0
  10. build/lib/catocli/parsers/mutation_sites/__init__.py +219 -0
  11. build/lib/catocli/parsers/parserApiClient.py +312 -0
  12. build/lib/catocli/parsers/query_accountBySubdomain/__init__.py +17 -0
  13. build/lib/catocli/parsers/query_accountMetrics/__init__.py +17 -0
  14. build/lib/catocli/parsers/query_accountRoles/__init__.py +17 -0
  15. build/lib/catocli/parsers/query_accountSnapshot/__init__.py +17 -0
  16. build/lib/catocli/parsers/query_admin/__init__.py +17 -0
  17. build/lib/catocli/parsers/query_admins/__init__.py +17 -0
  18. build/lib/catocli/parsers/query_appStats/__init__.py +17 -0
  19. build/lib/catocli/parsers/query_appStatsTimeSeries/__init__.py +17 -0
  20. build/lib/catocli/parsers/query_auditFeed/__init__.py +17 -0
  21. build/lib/catocli/parsers/query_container/__init__.py +17 -0
  22. build/lib/catocli/parsers/query_entityLookup/__init__.py +17 -0
  23. build/lib/catocli/parsers/query_events/__init__.py +17 -0
  24. build/lib/catocli/parsers/query_eventsFeed/__init__.py +17 -0
  25. build/lib/catocli/parsers/query_eventsTimeSeries/__init__.py +17 -0
  26. build/lib/catocli/parsers/query_hardwareManagement/__init__.py +17 -0
  27. build/lib/catocli/parsers/query_licensing/__init__.py +17 -0
  28. build/lib/catocli/parsers/query_policy/__init__.py +17 -0
  29. build/lib/catocli/parsers/query_siteLocation/__init__.py +17 -0
  30. build/lib/catocli/parsers/query_subDomains/__init__.py +17 -0
  31. build/lib/catocli/parsers/query_xdr/__init__.py +37 -0
  32. build/lib/catocli/parsers/raw/__init__.py +9 -0
  33. build/lib/graphql_client/__init__.py +11 -0
  34. build/lib/graphql_client/api/__init__.py +3 -0
  35. build/lib/graphql_client/api/call_api.py +73 -0
  36. build/lib/graphql_client/api_client.py +192 -0
  37. build/lib/graphql_client/api_client_types.py +404 -0
  38. build/lib/graphql_client/configuration.py +230 -0
  39. build/lib/graphql_client/models/__init__.py +13 -0
  40. build/lib/graphql_client/models/no_schema.py +71 -0
  41. build/lib/schema/catolib.py +1002 -0
  42. build/lib/schema/importSchema.py +60 -0
  43. build/lib/vendor/certifi/__init__.py +4 -0
  44. build/lib/vendor/certifi/__main__.py +12 -0
  45. build/lib/vendor/certifi/core.py +114 -0
  46. build/lib/vendor/certifi/py.typed +0 -0
  47. build/lib/vendor/six.py +998 -0
  48. build/lib/vendor/urllib3/__init__.py +211 -0
  49. build/lib/vendor/urllib3/_base_connection.py +172 -0
  50. build/lib/vendor/urllib3/_collections.py +483 -0
  51. build/lib/vendor/urllib3/_request_methods.py +278 -0
  52. build/lib/vendor/urllib3/_version.py +16 -0
  53. build/lib/vendor/urllib3/connection.py +1033 -0
  54. build/lib/vendor/urllib3/connectionpool.py +1182 -0
  55. build/lib/vendor/urllib3/contrib/__init__.py +0 -0
  56. build/lib/vendor/urllib3/contrib/emscripten/__init__.py +18 -0
  57. build/lib/vendor/urllib3/contrib/emscripten/connection.py +254 -0
  58. build/lib/vendor/urllib3/contrib/emscripten/fetch.py +418 -0
  59. build/lib/vendor/urllib3/contrib/emscripten/request.py +22 -0
  60. build/lib/vendor/urllib3/contrib/emscripten/response.py +285 -0
  61. build/lib/vendor/urllib3/contrib/pyopenssl.py +552 -0
  62. build/lib/vendor/urllib3/contrib/socks.py +228 -0
  63. build/lib/vendor/urllib3/exceptions.py +321 -0
  64. build/lib/vendor/urllib3/fields.py +341 -0
  65. build/lib/vendor/urllib3/filepost.py +89 -0
  66. build/lib/vendor/urllib3/http2/__init__.py +53 -0
  67. build/lib/vendor/urllib3/http2/connection.py +356 -0
  68. build/lib/vendor/urllib3/http2/probe.py +87 -0
  69. build/lib/vendor/urllib3/poolmanager.py +637 -0
  70. build/lib/vendor/urllib3/py.typed +2 -0
  71. build/lib/vendor/urllib3/response.py +1265 -0
  72. build/lib/vendor/urllib3/util/__init__.py +42 -0
  73. build/lib/vendor/urllib3/util/connection.py +137 -0
  74. build/lib/vendor/urllib3/util/proxy.py +43 -0
  75. build/lib/vendor/urllib3/util/request.py +256 -0
  76. build/lib/vendor/urllib3/util/response.py +101 -0
  77. build/lib/vendor/urllib3/util/retry.py +533 -0
  78. build/lib/vendor/urllib3/util/ssl_.py +513 -0
  79. build/lib/vendor/urllib3/util/ssl_match_hostname.py +159 -0
  80. build/lib/vendor/urllib3/util/ssltransport.py +276 -0
  81. build/lib/vendor/urllib3/util/timeout.py +275 -0
  82. build/lib/vendor/urllib3/util/url.py +471 -0
  83. build/lib/vendor/urllib3/util/util.py +42 -0
  84. build/lib/vendor/urllib3/util/wait.py +124 -0
  85. catocli/__init__.py +1 -1
  86. catocli/parsers/parserApiClient.py +7 -4
  87. {catocli-1.0.4.dist-info → catocli-1.0.5.dist-info}/METADATA +1 -1
  88. {catocli-1.0.4.dist-info → catocli-1.0.5.dist-info}/RECORD +92 -8
  89. {catocli-1.0.4.dist-info → catocli-1.0.5.dist-info}/top_level.txt +1 -0
  90. {catocli-1.0.4.dist-info → catocli-1.0.5.dist-info}/LICENSE +0 -0
  91. {catocli-1.0.4.dist-info → catocli-1.0.5.dist-info}/WHEEL +0 -0
  92. {catocli-1.0.4.dist-info → catocli-1.0.5.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,42 @@
1
+ # For backwards compatibility, provide imports that used to be here.
2
+ from __future__ import annotations
3
+
4
+ from .connection import is_connection_dropped
5
+ from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
6
+ from .response import is_fp_closed
7
+ from .retry import Retry
8
+ from .ssl_ import (
9
+ ALPN_PROTOCOLS,
10
+ IS_PYOPENSSL,
11
+ SSLContext,
12
+ assert_fingerprint,
13
+ create_urllib3_context,
14
+ resolve_cert_reqs,
15
+ resolve_ssl_version,
16
+ ssl_wrap_socket,
17
+ )
18
+ from .timeout import Timeout
19
+ from .url import Url, parse_url
20
+ from .wait import wait_for_read, wait_for_write
21
+
22
# Names re-exported from urllib3.util for backwards compatibility.
__all__ = (
    "IS_PYOPENSSL",
    "SSLContext",
    "ALPN_PROTOCOLS",
    "Retry",
    "Timeout",
    "Url",
    "assert_fingerprint",
    "create_urllib3_context",
    "is_connection_dropped",
    "is_fp_closed",
    "parse_url",
    "make_headers",
    "resolve_cert_reqs",
    "resolve_ssl_version",
    "ssl_wrap_socket",
    "wait_for_read",
    "wait_for_write",
    "SKIP_HEADER",
    "SKIPPABLE_HEADERS",
)
@@ -0,0 +1,137 @@
1
+ from __future__ import annotations
2
+
3
+ import socket
4
+ import typing
5
+
6
+ from ..exceptions import LocationParseError
7
+ from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
8
+
9
+ _TYPE_SOCKET_OPTIONS = typing.List[typing.Tuple[int, int, typing.Union[int, bytes]]]
10
+
11
+ if typing.TYPE_CHECKING:
12
+ from .._base_connection import BaseHTTPConnection
13
+
14
+
15
def is_connection_dropped(conn: BaseHTTPConnection) -> bool:  # Platform-specific
    """Report whether the peer has dropped this connection.

    :param conn: :class:`urllib3.connection.HTTPConnection` object.
    :returns: ``True`` when the connection should be closed.
    """
    return not conn.is_connected
21
+
22
+
23
# Adapted from socket.py in the Python 2.7 standard library test suite.
# Two changes from the original: a `socket_options` parameter was added to
# the signature, and IPv6 DNS records are skipped when the running system
# has no usable IPv6 support.
def create_connection(
    address: tuple[str, int],
    timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
    source_address: tuple[str, int] | None = None,
    socket_options: _TYPE_SOCKET_OPTIONS | None = None,
) -> socket.socket:
    """Connect to *address* and return the socket object.

    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`socket.getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    if host.startswith("["):
        host = host.strip("[]")
    err = None

    # allowed_gai_family() narrows getaddrinfo to IPv4, IPv6, or both,
    # depending on system support; the stdlib version returns all records.
    family = allowed_gai_family()

    try:
        host.encode("idna")
    except UnicodeError:
        raise LocationParseError(f"'{host}', label empty or too long") from None

    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, _canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # Apply any socket-level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not _DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            # Break explicitly a reference cycle
            err = None
            return sock

        except OSError as exc:
            err = exc
            if sock is not None:
                sock.close()

    if err is not None:
        try:
            raise err
        finally:
            # Break explicitly a reference cycle
            err = None
    else:
        raise OSError("getaddrinfo returns an empty list")
91
+
92
+
93
def _set_socket_options(
    sock: socket.socket, options: _TYPE_SOCKET_OPTIONS | None
) -> None:
    """Apply each ``(level, optname, value)`` tuple to *sock*; no-op on None."""
    if options is None:
        return
    for opt in options:
        sock.setsockopt(*opt)
101
+
102
+
103
def allowed_gai_family() -> socket.AddressFamily:
    """Address family to pass to getaddrinfo.

    ``AF_UNSPEC`` (search both IPv6 and IPv4 DNS records) when the system
    can bind IPv6, otherwise ``AF_INET`` only.
    """
    return socket.AF_UNSPEC if HAS_IPV6 else socket.AF_INET
112
+
113
+
114
def _has_ipv6(host: str) -> bool:
    """Return True if the system can bind an IPv6 address.

    ``socket.has_ipv6`` only says CPython was *compiled* with IPv6 support;
    the OS may still have it disabled, so we probe by actually binding.
    See https://github.com/urllib3/urllib3/pull/611 and
    https://bugs.python.org/issue658327
    """
    probe = None
    supported = False

    if socket.has_ipv6:
        try:
            probe = socket.socket(socket.AF_INET6)
            probe.bind((host, 0))
            supported = True
        except Exception:
            pass

    if probe:
        probe.close()
    return supported


HAS_IPV6 = _has_ipv6("::1")
@@ -0,0 +1,43 @@
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+
5
+ from .url import Url
6
+
7
+ if typing.TYPE_CHECKING:
8
+ from ..connection import ProxyConfig
9
+
10
+
11
def connection_requires_http_tunnel(
    proxy_url: Url | None = None,
    proxy_config: ProxyConfig | None = None,
    destination_scheme: str | None = None,
) -> bool:
    """Decide whether the connection needs an HTTP CONNECT tunnel through the proxy.

    :param URL proxy_url:
        URL of the proxy.
    :param ProxyConfig proxy_config:
        Proxy configuration from poolmanager.py
    :param str destination_scheme:
        The scheme of the destination. (i.e https, http, etc)
    """
    # Without a proxy there is nothing to tunnel through.
    if proxy_url is None:
        return False

    # Plain-HTTP destinations are always forwarded, never tunneled.
    if destination_scheme == "http":
        return False

    # An HTTPS proxy may be explicitly configured to *forward*
    # HTTPS traffic rather than tunnel it.
    uses_forwarding = bool(
        proxy_url.scheme == "https"
        and proxy_config
        and proxy_config.use_forwarding_for_https
    )
    # Every remaining case requires a tunnel.
    return not uses_forwarding
@@ -0,0 +1,256 @@
1
+ from __future__ import annotations
2
+
3
+ import io
4
+ import typing
5
+ from base64 import b64encode
6
+ from enum import Enum
7
+
8
+ from ..exceptions import UnrewindableBodyError
9
+ from .util import to_bytes
10
+
11
+ if typing.TYPE_CHECKING:
12
+ from typing import Final
13
+
14
# Sentinel: pass as a value within ``headers`` to skip emitting a header
# that urllib3 would otherwise add automatically. Only ``Accept-Encoding``,
# ``Host``, and ``User-Agent`` may be skipped this way.
SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])

# Default Accept-Encoding value; extended below when optional compression
# backends are importable in this environment.
ACCEPT_ENCODING = "gzip,deflate"
try:
    try:
        import brotlicffi as _unused_module_brotli  # type: ignore[import-not-found] # noqa: F401
    except ImportError:
        import brotli as _unused_module_brotli  # type: ignore[import-not-found] # noqa: F401
except ImportError:
    pass
else:
    ACCEPT_ENCODING += ",br"
try:
    import zstandard as _unused_module_zstd  # noqa: F401
except ImportError:
    pass
else:
    ACCEPT_ENCODING += ",zstd"


class _TYPE_FAILEDTELL(Enum):
    # Single-member enum used as a typed sentinel for "tell() failed".
    token = 0


_FAILEDTELL: Final[_TYPE_FAILEDTELL] = _TYPE_FAILEDTELL.token

_TYPE_BODY_POSITION = typing.Union[int, _TYPE_FAILEDTELL]

# Methods for which we do NOT emit an implicit 'Content-Length: 0' when
# there is no body. Expressed in the negative because unknown methods are
# treated like 'POST', which *does* expect a body.
_METHODS_NOT_EXPECTING_BODY = {"GET", "HEAD", "DELETE", "TRACE", "OPTIONS", "CONNECT"}
53
+
54
+
55
def make_headers(
    keep_alive: bool | None = None,
    accept_encoding: bool | list[str] | str | None = None,
    user_agent: str | None = None,
    basic_auth: str | None = None,
    proxy_basic_auth: str | None = None,
    disable_cache: bool | None = None,
) -> dict[str, str]:
    """Build a request-header dict from common shortcuts.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'. If either the ``brotli`` or
        ``brotlicffi`` package is installed 'gzip,deflate,br' is used instead.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example:

    .. code-block:: python

        import urllib3

        print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0"))
        # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        print(urllib3.util.make_headers(accept_encoding=True))
        # {'accept-encoding': 'gzip,deflate'}
    """

    def _basic(credentials: str) -> str:
        # RFC 7617 basic credentials; urllib3 has always used latin-1 here.
        return f"Basic {b64encode(credentials.encode('latin-1')).decode()}"

    headers: dict[str, str] = {}

    if accept_encoding:
        if isinstance(accept_encoding, list):
            accept_encoding = ",".join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            # Truthy non-string/non-list (i.e. True) selects the default.
            accept_encoding = ACCEPT_ENCODING
        headers["accept-encoding"] = accept_encoding

    if user_agent:
        headers["user-agent"] = user_agent

    if keep_alive:
        headers["connection"] = "keep-alive"

    if basic_auth:
        headers["authorization"] = _basic(basic_auth)

    if proxy_basic_auth:
        headers["proxy-authorization"] = _basic(proxy_basic_auth)

    if disable_cache:
        headers["cache-control"] = "no-cache"

    return headers
132
+
133
+
134
def set_file_position(
    body: typing.Any, pos: _TYPE_BODY_POSITION | None
) -> _TYPE_BODY_POSITION | None:
    """Seek *body* to *pos* when given; otherwise try to record its position.

    Returns the (possibly freshly recorded) position. When ``tell()``
    exists but raises :class:`OSError`, the ``_FAILEDTELL`` sentinel is
    returned instead of ``None`` so a later rewind attempt can report
    the failure precisely.
    """
    if pos is not None:
        rewind_body(body, pos)
        return pos

    if getattr(body, "tell", None) is None:
        # Nothing to record: the body has no position concept.
        return None

    try:
        return body.tell()
    except OSError:
        # Differentiates from None, allowing us to catch a failed
        # `tell()` later when trying to rewind the body.
        return _FAILEDTELL
152
+
153
+
154
def rewind_body(body: typing.IO[typing.AnyStr], body_pos: _TYPE_BODY_POSITION) -> None:
    """Seek *body* back to *body_pos*, ahead of a redirect or retry.

    :param body:
        File-like object that supports seek.

    :param int pos:
        Position to seek to in file.

    :raises UnrewindableBodyError: when seeking fails, or the original
        position could not be recorded (``_FAILEDTELL`` sentinel).
    :raises ValueError: when *body_pos* is neither an int nor the sentinel.
    """
    seek = getattr(body, "seek", None)
    if seek is not None and isinstance(body_pos, int):
        try:
            seek(body_pos)
        except OSError as exc:
            raise UnrewindableBodyError(
                "An error occurred when rewinding request body for redirect/retry."
            ) from exc
        return

    if body_pos is _FAILEDTELL:
        raise UnrewindableBodyError(
            "Unable to record file position for rewinding "
            "request body during a redirect/retry."
        )

    raise ValueError(
        f"body_pos must be of type integer, instead it was {type(body_pos)}."
    )
182
+
183
+
184
class ChunksAndContentLength(typing.NamedTuple):
    """Result of body normalization: byte chunks plus optional length."""

    # Iterable of byte chunks for socket.sendall(), or None when no body.
    chunks: typing.Iterable[bytes] | None
    # Value for the Content-Length header; None means use chunked framing.
    content_length: int | None
187
+
188
+
189
def body_to_chunks(
    body: typing.Any | None, method: str, blocksize: int
) -> ChunksAndContentLength:
    """Normalize an HTTP request *body* for *method*.

    Takes the HTTP request method, body, and blocksize and transforms
    them into an iterable of chunks to pass to socket.sendall() and an
    optional 'Content-Length' header.

    A 'Content-Length' of 'None' indicates the length of the body can't
    be determined so should use 'Transfer-Encoding: chunked' for framing
    instead.
    """

    chunks: typing.Iterable[bytes] | None
    content_length: int | None

    if body is None:
        # No body: recommend 'Content-Length: 0' only for methods that
        # normally carry one (unknown methods are treated like 'POST').
        chunks = None
        if method.upper() not in _METHODS_NOT_EXPECTING_BODY:
            content_length = 0
        else:
            content_length = None

    elif isinstance(body, (str, bytes)):
        # Bytes or strings become bytes with an exact length.
        chunks = (to_bytes(body),)
        content_length = len(chunks[0])

    elif hasattr(body, "read"):
        # File-like object: stream it in blocksize pieces.
        # TODO: use seek() and tell() for length?
        def chunk_readable() -> typing.Iterable[bytes]:
            nonlocal body, blocksize
            encode = isinstance(body, io.TextIOBase)
            while True:
                datablock = body.read(blocksize)
                if not datablock:
                    break
                if encode:
                    datablock = datablock.encode("utf-8")
                yield datablock

        chunks = chunk_readable()
        content_length = None

    else:
        # Otherwise we need to start checking via duck-typing.
        try:
            # Check if the body implements the buffer API.
            mv = memoryview(body)
        except TypeError:
            try:
                # Check if the body is an iterable
                chunks = iter(body)
                content_length = None
            except TypeError:
                raise TypeError(
                    f"'body' must be a bytes-like object, file-like "
                    f"object, or iterable. Instead was {body!r}"
                ) from None
        else:
            # Buffer-API objects can be passed directly to socket.sendall().
            chunks = (body,)
            content_length = mv.nbytes

    return ChunksAndContentLength(chunks=chunks, content_length=content_length)
@@ -0,0 +1,101 @@
1
+ from __future__ import annotations
2
+
3
+ import http.client as httplib
4
+ from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
5
+
6
+ from ..exceptions import HeaderParsingError
7
+
8
+
9
def is_fp_closed(obj: object) -> bool:
    """Best-effort check whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    :raises ValueError: when none of the known "closed" signals exist.
    """

    # Prefer `isclosed()` in case Python3 doesn't set `closed`.
    # GH Issue #928
    try:
        return obj.isclosed()  # type: ignore[no-any-return, attr-defined]
    except AttributeError:
        pass

    # The official file-like-object attribute.
    try:
        return obj.closed  # type: ignore[no-any-return, attr-defined]
    except AttributeError:
        pass

    # Containers for another file-like object (e.g. HTTPResponse)
    # release their inner fp on exhaustion.
    try:
        return obj.fp is None  # type: ignore[attr-defined]
    except AttributeError:
        pass

    raise ValueError("Unable to determine whether fp is closed.")
38
+
39
+
40
def assert_header_parsing(headers: httplib.HTTPMessage) -> None:
    """Raise if *headers* did not parse cleanly.

    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param http.client.HTTPMessage headers: Headers to verify.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """

    # Defect extraction fails silently on the wrong parameter kind, so
    # check the type explicitly to make debugging easier.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError(f"expected httplib.Message, got {type(headers)}.")

    # Leftover payload on a non-multipart message means some header data
    # was not consumed as headers (get_payload is
    # email.message.Message.get_payload).
    unparsed_data = None
    if not headers.is_multipart():
        payload = headers.get_payload()
        if isinstance(payload, (bytes, str)):
            unparsed_data = payload

    # httplib hands only header data to parse_headers() while assuming a
    # response body follows, which manufactures bogus defects on
    # multipart responses. See: https://github.com/urllib3/urllib3/issues/800
    #
    # So we ignore the following defects:
    # - StartBoundaryNotFoundDefect:
    #     The claimed start boundary was never found.
    # - MultipartInvariantViolationDefect:
    #     A message claimed to be a multipart but no subparts were found.
    ignored = (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
    defects = [
        defect for defect in headers.defects if not isinstance(defect, ignored)
    ]

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
89
+
90
+
91
+ def is_response_to_head(response: httplib.HTTPResponse) -> bool:
92
+ """
93
+ Checks whether the request of a response has been a HEAD-request.
94
+
95
+ :param http.client.HTTPResponse response:
96
+ Response to check if the originating request
97
+ used 'HEAD' as a method.
98
+ """
99
+ # FIXME: Can we do this somehow without accessing private httplib _method?
100
+ method_str = response._method # type: str # type: ignore[attr-defined]
101
+ return method_str.upper() == "HEAD"