catocli 1.0.13__py3-none-any.whl → 1.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of catocli might be problematic. Click here for more details.
- catocli/Utils/clidriver.py +15 -2
- catocli/__init__.py +1 -1
- catocli/parsers/custom/customLib.py +3 -1
- catocli/parsers/mutation_accountManagement/README.md +7 -0
- catocli/parsers/mutation_accountManagement/__init__.py +51 -0
- catocli/parsers/mutation_accountManagement_addAccount/README.md +17 -0
- catocli/parsers/mutation_accountManagement_removeAccount/README.md +16 -0
- catocli/parsers/mutation_accountManagement_updateAccount/README.md +17 -0
- catocli/parsers/mutation_admin_addAdmin/README.md +2 -2
- catocli/parsers/mutation_admin_updateAdmin/README.md +2 -2
- catocli/parsers/mutation_container_delete/README.md +2 -2
- catocli/parsers/mutation_policy_internetFirewall_addRule/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_addSection/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_createPolicyRevision/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_discardPolicyRevision/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_moveRule/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_moveSection/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_publishPolicyRevision/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_removeRule/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_removeSection/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_updatePolicy/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_updateRule/README.md +3 -3
- catocli/parsers/mutation_policy_internetFirewall_updateSection/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_addRule/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_addSection/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_createPolicyRevision/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_discardPolicyRevision/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_moveRule/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_moveSection/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_publishPolicyRevision/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_removeRule/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_removeSection/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_updatePolicy/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_updateRule/README.md +3 -3
- catocli/parsers/mutation_policy_wanFirewall_updateSection/README.md +3 -3
- catocli/parsers/mutation_site/__init__.py +14 -0
- catocli/parsers/mutation_site_addIpsecIkeV2Site/README.md +2 -2
- catocli/parsers/mutation_site_addIpsecIkeV2SiteTunnels/README.md +2 -2
- catocli/parsers/mutation_site_addNetworkRange/README.md +2 -2
- catocli/parsers/mutation_site_addSocketSite/README.md +2 -2
- catocli/parsers/mutation_site_addStaticHost/README.md +2 -2
- catocli/parsers/mutation_site_removeIpsecIkeV2SiteTunnels/README.md +18 -0
- catocli/parsers/mutation_site_updateHa/README.md +2 -2
- catocli/parsers/mutation_site_updateIpsecIkeV2SiteGeneralDetails/README.md +2 -2
- catocli/parsers/mutation_site_updateIpsecIkeV2SiteTunnels/README.md +2 -2
- catocli/parsers/mutation_site_updateNetworkRange/README.md +2 -2
- catocli/parsers/mutation_site_updateSiteGeneralDetails/README.md +2 -2
- catocli/parsers/mutation_site_updateSocketInterface/README.md +2 -2
- catocli/parsers/mutation_site_updateStaticHost/README.md +2 -2
- catocli/parsers/mutation_sites/__init__.py +14 -0
- catocli/parsers/mutation_sites_addIpsecIkeV2Site/README.md +2 -2
- catocli/parsers/mutation_sites_addIpsecIkeV2SiteTunnels/README.md +2 -2
- catocli/parsers/mutation_sites_addNetworkRange/README.md +2 -2
- catocli/parsers/mutation_sites_addSocketSite/README.md +2 -2
- catocli/parsers/mutation_sites_addStaticHost/README.md +2 -2
- catocli/parsers/mutation_sites_removeIpsecIkeV2SiteTunnels/README.md +18 -0
- catocli/parsers/mutation_sites_updateHa/README.md +2 -2
- catocli/parsers/mutation_sites_updateIpsecIkeV2SiteGeneralDetails/README.md +2 -2
- catocli/parsers/mutation_sites_updateIpsecIkeV2SiteTunnels/README.md +2 -2
- catocli/parsers/mutation_sites_updateNetworkRange/README.md +2 -2
- catocli/parsers/mutation_sites_updateSiteGeneralDetails/README.md +2 -2
- catocli/parsers/mutation_sites_updateSocketInterface/README.md +2 -2
- catocli/parsers/mutation_sites_updateStaticHost/README.md +2 -2
- catocli/parsers/parserApiClient.py +7 -4
- catocli/parsers/query_accountManagement/README.md +16 -0
- catocli/parsers/query_accountManagement/__init__.py +17 -0
- catocli/parsers/query_admins/README.md +2 -2
- catocli/parsers/query_appStats/README.md +5 -5
- catocli/parsers/query_appStatsTimeSeries/README.md +4 -4
- catocli/parsers/query_auditFeed/README.md +2 -2
- catocli/parsers/query_container/README.md +8 -8
- catocli/parsers/query_entityLookup/README.md +6 -6
- catocli/parsers/query_events/README.md +5 -5
- catocli/parsers/query_eventsFeed/README.md +2 -2
- catocli/parsers/query_eventsTimeSeries/README.md +4 -4
- catocli/parsers/query_hardwareManagement/README.md +2 -2
- catocli/parsers/query_policy/README.md +3 -3
- catocli/parsers/query_xdr_stories/README.md +2 -2
- {catocli-1.0.13.dist-info → catocli-1.0.15.dist-info}/METADATA +1 -1
- catocli-1.0.15.dist-info/RECORD +275 -0
- {catocli-1.0.13.dist-info → catocli-1.0.15.dist-info}/top_level.txt +0 -2
- graphql_client/api_client_types.py +4 -0
- models/mutation.accountManagement.addAccount.json +908 -0
- models/mutation.accountManagement.removeAccount.json +446 -0
- models/mutation.accountManagement.updateAccount.json +524 -0
- models/mutation.admin.addAdmin.json +103 -103
- models/mutation.admin.removeAdmin.json +3 -3
- models/mutation.admin.updateAdmin.json +121 -121
- models/mutation.container.delete.json +67 -67
- models/mutation.policy.internetFirewall.addRule.json +2497 -1889
- models/mutation.policy.internetFirewall.addSection.json +92 -92
- models/mutation.policy.internetFirewall.createPolicyRevision.json +734 -546
- models/mutation.policy.internetFirewall.discardPolicyRevision.json +734 -546
- models/mutation.policy.internetFirewall.moveRule.json +723 -535
- models/mutation.policy.internetFirewall.moveSection.json +85 -85
- models/mutation.policy.internetFirewall.publishPolicyRevision.json +734 -546
- models/mutation.policy.internetFirewall.removeRule.json +710 -522
- models/mutation.policy.internetFirewall.removeSection.json +72 -72
- models/mutation.policy.internetFirewall.updatePolicy.json +734 -546
- models/mutation.policy.internetFirewall.updateRule.json +2476 -1885
- models/mutation.policy.internetFirewall.updateSection.json +85 -85
- models/mutation.policy.wanFirewall.addRule.json +2577 -1225
- models/mutation.policy.wanFirewall.addSection.json +92 -92
- models/mutation.policy.wanFirewall.createPolicyRevision.json +800 -455
- models/mutation.policy.wanFirewall.discardPolicyRevision.json +800 -455
- models/mutation.policy.wanFirewall.moveRule.json +789 -444
- models/mutation.policy.wanFirewall.moveSection.json +85 -85
- models/mutation.policy.wanFirewall.publishPolicyRevision.json +800 -455
- models/mutation.policy.wanFirewall.removeRule.json +776 -431
- models/mutation.policy.wanFirewall.removeSection.json +72 -72
- models/mutation.policy.wanFirewall.updatePolicy.json +800 -455
- models/mutation.policy.wanFirewall.updateRule.json +2597 -1265
- models/mutation.policy.wanFirewall.updateSection.json +85 -85
- models/mutation.site.addIpsecIkeV2Site.json +43 -43
- models/mutation.site.addIpsecIkeV2SiteTunnels.json +505 -102
- models/mutation.site.addNetworkRange.json +53 -53
- models/mutation.site.addSocketSite.json +43 -43
- models/mutation.site.addStaticHost.json +33 -33
- models/mutation.site.removeIpsecIkeV2SiteTunnels.json +861 -0
- models/mutation.site.removeNetworkRange.json +3 -3
- models/mutation.site.removeSite.json +3 -3
- models/mutation.site.removeStaticHost.json +3 -3
- models/mutation.site.updateHa.json +54 -54
- models/mutation.site.updateIpsecIkeV2SiteGeneralDetails.json +79 -79
- models/mutation.site.updateIpsecIkeV2SiteTunnels.json +123 -123
- models/mutation.site.updateNetworkRange.json +74 -74
- models/mutation.site.updateSiteGeneralDetails.json +73 -73
- models/mutation.site.updateSocketInterface.json +282 -282
- models/mutation.site.updateStaticHost.json +54 -54
- models/mutation.sites.addIpsecIkeV2Site.json +43 -43
- models/mutation.sites.addIpsecIkeV2SiteTunnels.json +505 -102
- models/mutation.sites.addNetworkRange.json +53 -53
- models/mutation.sites.addSocketSite.json +43 -43
- models/mutation.sites.addStaticHost.json +33 -33
- models/mutation.sites.removeIpsecIkeV2SiteTunnels.json +861 -0
- models/mutation.sites.removeNetworkRange.json +3 -3
- models/mutation.sites.removeSite.json +3 -3
- models/mutation.sites.removeStaticHost.json +3 -3
- models/mutation.sites.updateHa.json +54 -54
- models/mutation.sites.updateIpsecIkeV2SiteGeneralDetails.json +79 -79
- models/mutation.sites.updateIpsecIkeV2SiteTunnels.json +123 -123
- models/mutation.sites.updateNetworkRange.json +74 -74
- models/mutation.sites.updateSiteGeneralDetails.json +73 -73
- models/mutation.sites.updateSocketInterface.json +282 -282
- models/mutation.sites.updateStaticHost.json +54 -54
- models/query.accountManagement.json +355 -0
- models/query.accountMetrics.json +235 -235
- models/query.accountRoles.json +3 -3
- models/query.accountSnapshot.json +278 -118
- models/query.admin.json +24 -24
- models/query.admins.json +138 -138
- models/query.appStats.json +92 -92
- models/query.appStatsTimeSeries.json +78 -78
- models/query.auditFeed.json +77 -77
- models/query.container.json +192 -192
- models/query.entityLookup.json +154 -154
- models/query.events.json +54 -54
- models/query.eventsFeed.json +38 -38
- models/query.eventsTimeSeries.json +78 -78
- models/query.hardwareManagement.json +125 -125
- models/query.licensing.json +933 -249
- models/query.policy.json +1502 -970
- models/query.xdr.stories.json +842 -842
- models/query.xdr.story.json +610 -610
- schema/catolib.py +12 -13
- build/lib/catocli/Utils/clidriver.py +0 -117
- build/lib/catocli/__init__.py +0 -2
- build/lib/catocli/__main__.py +0 -12
- build/lib/catocli/parsers/custom/__init__.py +0 -47
- build/lib/catocli/parsers/custom/customLib.py +0 -70
- build/lib/catocli/parsers/mutation_admin/__init__.py +0 -51
- build/lib/catocli/parsers/mutation_container/__init__.py +0 -23
- build/lib/catocli/parsers/mutation_policy/__init__.py +0 -357
- build/lib/catocli/parsers/mutation_site/__init__.py +0 -219
- build/lib/catocli/parsers/mutation_sites/__init__.py +0 -219
- build/lib/catocli/parsers/parserApiClient.py +0 -309
- build/lib/catocli/parsers/query_accountBySubdomain/__init__.py +0 -17
- build/lib/catocli/parsers/query_accountMetrics/__init__.py +0 -17
- build/lib/catocli/parsers/query_accountRoles/__init__.py +0 -17
- build/lib/catocli/parsers/query_accountSnapshot/__init__.py +0 -17
- build/lib/catocli/parsers/query_admin/__init__.py +0 -17
- build/lib/catocli/parsers/query_admins/__init__.py +0 -17
- build/lib/catocli/parsers/query_appStats/__init__.py +0 -17
- build/lib/catocli/parsers/query_appStatsTimeSeries/__init__.py +0 -17
- build/lib/catocli/parsers/query_auditFeed/__init__.py +0 -17
- build/lib/catocli/parsers/query_container/__init__.py +0 -17
- build/lib/catocli/parsers/query_entityLookup/__init__.py +0 -17
- build/lib/catocli/parsers/query_events/__init__.py +0 -17
- build/lib/catocli/parsers/query_eventsFeed/__init__.py +0 -17
- build/lib/catocli/parsers/query_eventsTimeSeries/__init__.py +0 -17
- build/lib/catocli/parsers/query_hardwareManagement/__init__.py +0 -17
- build/lib/catocli/parsers/query_licensing/__init__.py +0 -17
- build/lib/catocli/parsers/query_policy/__init__.py +0 -17
- build/lib/catocli/parsers/query_siteLocation/__init__.py +0 -17
- build/lib/catocli/parsers/query_subDomains/__init__.py +0 -17
- build/lib/catocli/parsers/query_xdr/__init__.py +0 -37
- build/lib/catocli/parsers/raw/__init__.py +0 -9
- build/lib/graphql_client/__init__.py +0 -11
- build/lib/graphql_client/api/__init__.py +0 -3
- build/lib/graphql_client/api/call_api.py +0 -73
- build/lib/graphql_client/api_client.py +0 -192
- build/lib/graphql_client/api_client_types.py +0 -404
- build/lib/graphql_client/configuration.py +0 -230
- build/lib/graphql_client/models/__init__.py +0 -13
- build/lib/graphql_client/models/no_schema.py +0 -71
- build/lib/schema/catolib.py +0 -1016
- build/lib/schema/importSchema.py +0 -60
- build/lib/vendor/certifi/__init__.py +0 -4
- build/lib/vendor/certifi/__main__.py +0 -12
- build/lib/vendor/certifi/core.py +0 -114
- build/lib/vendor/certifi/py.typed +0 -0
- build/lib/vendor/six.py +0 -998
- build/lib/vendor/urllib3/__init__.py +0 -211
- build/lib/vendor/urllib3/_base_connection.py +0 -172
- build/lib/vendor/urllib3/_collections.py +0 -483
- build/lib/vendor/urllib3/_request_methods.py +0 -278
- build/lib/vendor/urllib3/_version.py +0 -16
- build/lib/vendor/urllib3/connection.py +0 -1033
- build/lib/vendor/urllib3/connectionpool.py +0 -1182
- build/lib/vendor/urllib3/contrib/__init__.py +0 -0
- build/lib/vendor/urllib3/contrib/emscripten/__init__.py +0 -18
- build/lib/vendor/urllib3/contrib/emscripten/connection.py +0 -254
- build/lib/vendor/urllib3/contrib/emscripten/fetch.py +0 -418
- build/lib/vendor/urllib3/contrib/emscripten/request.py +0 -22
- build/lib/vendor/urllib3/contrib/emscripten/response.py +0 -285
- build/lib/vendor/urllib3/contrib/pyopenssl.py +0 -552
- build/lib/vendor/urllib3/contrib/socks.py +0 -228
- build/lib/vendor/urllib3/exceptions.py +0 -321
- build/lib/vendor/urllib3/fields.py +0 -341
- build/lib/vendor/urllib3/filepost.py +0 -89
- build/lib/vendor/urllib3/http2/__init__.py +0 -53
- build/lib/vendor/urllib3/http2/connection.py +0 -356
- build/lib/vendor/urllib3/http2/probe.py +0 -87
- build/lib/vendor/urllib3/poolmanager.py +0 -637
- build/lib/vendor/urllib3/py.typed +0 -2
- build/lib/vendor/urllib3/response.py +0 -1265
- build/lib/vendor/urllib3/util/__init__.py +0 -42
- build/lib/vendor/urllib3/util/connection.py +0 -137
- build/lib/vendor/urllib3/util/proxy.py +0 -43
- build/lib/vendor/urllib3/util/request.py +0 -256
- build/lib/vendor/urllib3/util/response.py +0 -101
- build/lib/vendor/urllib3/util/retry.py +0 -533
- build/lib/vendor/urllib3/util/ssl_.py +0 -513
- build/lib/vendor/urllib3/util/ssl_match_hostname.py +0 -159
- build/lib/vendor/urllib3/util/ssltransport.py +0 -276
- build/lib/vendor/urllib3/util/timeout.py +0 -275
- build/lib/vendor/urllib3/util/url.py +0 -471
- build/lib/vendor/urllib3/util/util.py +0 -42
- build/lib/vendor/urllib3/util/wait.py +0 -124
- catocli-1.0.13.dist-info/RECORD +0 -344
- {catocli-1.0.13.dist-info → catocli-1.0.15.dist-info}/LICENSE +0 -0
- {catocli-1.0.13.dist-info → catocli-1.0.15.dist-info}/WHEEL +0 -0
- {catocli-1.0.13.dist-info → catocli-1.0.15.dist-info}/entry_points.txt +0 -0
|
File without changes
|
|
@@ -1,18 +0,0 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import sys
|
|
4
|
-
sys.path.insert(0, 'vendor')
|
|
5
|
-
import urllib3.connection
|
|
6
|
-
|
|
7
|
-
from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
|
8
|
-
from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
def inject_into_urllib3() -> None:
|
|
12
|
-
# override connection classes to use emscripten specific classes
|
|
13
|
-
# n.b. mypy complains about the overriding of classes below
|
|
14
|
-
# if it isn't ignored
|
|
15
|
-
HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection
|
|
16
|
-
HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection
|
|
17
|
-
urllib3.connection.HTTPConnection = EmscriptenHTTPConnection # type: ignore[misc,assignment]
|
|
18
|
-
urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection # type: ignore[misc,assignment]
|
|
@@ -1,254 +0,0 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import os
|
|
4
|
-
import typing
|
|
5
|
-
|
|
6
|
-
# use http.client.HTTPException for consistency with non-emscripten
|
|
7
|
-
from http.client import HTTPException as HTTPException # noqa: F401
|
|
8
|
-
from http.client import ResponseNotReady
|
|
9
|
-
|
|
10
|
-
from ..._base_connection import _TYPE_BODY
|
|
11
|
-
from ...connection import HTTPConnection, ProxyConfig, port_by_scheme
|
|
12
|
-
from ...exceptions import TimeoutError
|
|
13
|
-
from ...response import BaseHTTPResponse
|
|
14
|
-
from ...util.connection import _TYPE_SOCKET_OPTIONS
|
|
15
|
-
from ...util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
|
|
16
|
-
from ...util.url import Url
|
|
17
|
-
from .fetch import _RequestError, _TimeoutError, send_request, send_streaming_request
|
|
18
|
-
from .request import EmscriptenRequest
|
|
19
|
-
from .response import EmscriptenHttpResponseWrapper, EmscriptenResponse
|
|
20
|
-
|
|
21
|
-
if typing.TYPE_CHECKING:
|
|
22
|
-
from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
class EmscriptenHTTPConnection:
|
|
26
|
-
default_port: typing.ClassVar[int] = port_by_scheme["http"]
|
|
27
|
-
default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
|
|
28
|
-
|
|
29
|
-
timeout: None | (float)
|
|
30
|
-
|
|
31
|
-
host: str
|
|
32
|
-
port: int
|
|
33
|
-
blocksize: int
|
|
34
|
-
source_address: tuple[str, int] | None
|
|
35
|
-
socket_options: _TYPE_SOCKET_OPTIONS | None
|
|
36
|
-
|
|
37
|
-
proxy: Url | None
|
|
38
|
-
proxy_config: ProxyConfig | None
|
|
39
|
-
|
|
40
|
-
is_verified: bool = False
|
|
41
|
-
proxy_is_verified: bool | None = None
|
|
42
|
-
|
|
43
|
-
_response: EmscriptenResponse | None
|
|
44
|
-
|
|
45
|
-
def __init__(
|
|
46
|
-
self,
|
|
47
|
-
host: str,
|
|
48
|
-
port: int = 0,
|
|
49
|
-
*,
|
|
50
|
-
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
|
|
51
|
-
source_address: tuple[str, int] | None = None,
|
|
52
|
-
blocksize: int = 8192,
|
|
53
|
-
socket_options: _TYPE_SOCKET_OPTIONS | None = None,
|
|
54
|
-
proxy: Url | None = None,
|
|
55
|
-
proxy_config: ProxyConfig | None = None,
|
|
56
|
-
) -> None:
|
|
57
|
-
self.host = host
|
|
58
|
-
self.port = port
|
|
59
|
-
self.timeout = timeout if isinstance(timeout, float) else 0.0
|
|
60
|
-
self.scheme = "http"
|
|
61
|
-
self._closed = True
|
|
62
|
-
self._response = None
|
|
63
|
-
# ignore these things because we don't
|
|
64
|
-
# have control over that stuff
|
|
65
|
-
self.proxy = None
|
|
66
|
-
self.proxy_config = None
|
|
67
|
-
self.blocksize = blocksize
|
|
68
|
-
self.source_address = None
|
|
69
|
-
self.socket_options = None
|
|
70
|
-
self.is_verified = False
|
|
71
|
-
|
|
72
|
-
def set_tunnel(
|
|
73
|
-
self,
|
|
74
|
-
host: str,
|
|
75
|
-
port: int | None = 0,
|
|
76
|
-
headers: typing.Mapping[str, str] | None = None,
|
|
77
|
-
scheme: str = "http",
|
|
78
|
-
) -> None:
|
|
79
|
-
pass
|
|
80
|
-
|
|
81
|
-
def connect(self) -> None:
|
|
82
|
-
pass
|
|
83
|
-
|
|
84
|
-
def request(
|
|
85
|
-
self,
|
|
86
|
-
method: str,
|
|
87
|
-
url: str,
|
|
88
|
-
body: _TYPE_BODY | None = None,
|
|
89
|
-
headers: typing.Mapping[str, str] | None = None,
|
|
90
|
-
# We know *at least* botocore is depending on the order of the
|
|
91
|
-
# first 3 parameters so to be safe we only mark the later ones
|
|
92
|
-
# as keyword-only to ensure we have space to extend.
|
|
93
|
-
*,
|
|
94
|
-
chunked: bool = False,
|
|
95
|
-
preload_content: bool = True,
|
|
96
|
-
decode_content: bool = True,
|
|
97
|
-
enforce_content_length: bool = True,
|
|
98
|
-
) -> None:
|
|
99
|
-
self._closed = False
|
|
100
|
-
if url.startswith("/"):
|
|
101
|
-
# no scheme / host / port included, make a full url
|
|
102
|
-
url = f"{self.scheme}://{self.host}:{self.port}" + url
|
|
103
|
-
request = EmscriptenRequest(
|
|
104
|
-
url=url,
|
|
105
|
-
method=method,
|
|
106
|
-
timeout=self.timeout if self.timeout else 0,
|
|
107
|
-
decode_content=decode_content,
|
|
108
|
-
)
|
|
109
|
-
request.set_body(body)
|
|
110
|
-
if headers:
|
|
111
|
-
for k, v in headers.items():
|
|
112
|
-
request.set_header(k, v)
|
|
113
|
-
self._response = None
|
|
114
|
-
try:
|
|
115
|
-
if not preload_content:
|
|
116
|
-
self._response = send_streaming_request(request)
|
|
117
|
-
if self._response is None:
|
|
118
|
-
self._response = send_request(request)
|
|
119
|
-
except _TimeoutError as e:
|
|
120
|
-
raise TimeoutError(e.message) from e
|
|
121
|
-
except _RequestError as e:
|
|
122
|
-
raise HTTPException(e.message) from e
|
|
123
|
-
|
|
124
|
-
def getresponse(self) -> BaseHTTPResponse:
|
|
125
|
-
if self._response is not None:
|
|
126
|
-
return EmscriptenHttpResponseWrapper(
|
|
127
|
-
internal_response=self._response,
|
|
128
|
-
url=self._response.request.url,
|
|
129
|
-
connection=self,
|
|
130
|
-
)
|
|
131
|
-
else:
|
|
132
|
-
raise ResponseNotReady()
|
|
133
|
-
|
|
134
|
-
def close(self) -> None:
|
|
135
|
-
self._closed = True
|
|
136
|
-
self._response = None
|
|
137
|
-
|
|
138
|
-
@property
|
|
139
|
-
def is_closed(self) -> bool:
|
|
140
|
-
"""Whether the connection either is brand new or has been previously closed.
|
|
141
|
-
If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
|
|
142
|
-
properties must be False.
|
|
143
|
-
"""
|
|
144
|
-
return self._closed
|
|
145
|
-
|
|
146
|
-
@property
|
|
147
|
-
def is_connected(self) -> bool:
|
|
148
|
-
"""Whether the connection is actively connected to any origin (proxy or target)"""
|
|
149
|
-
return True
|
|
150
|
-
|
|
151
|
-
@property
|
|
152
|
-
def has_connected_to_proxy(self) -> bool:
|
|
153
|
-
"""Whether the connection has successfully connected to its proxy.
|
|
154
|
-
This returns False if no proxy is in use. Used to determine whether
|
|
155
|
-
errors are coming from the proxy layer or from tunnelling to the target origin.
|
|
156
|
-
"""
|
|
157
|
-
return False
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
class EmscriptenHTTPSConnection(EmscriptenHTTPConnection):
|
|
161
|
-
default_port = port_by_scheme["https"]
|
|
162
|
-
# all this is basically ignored, as browser handles https
|
|
163
|
-
cert_reqs: int | str | None = None
|
|
164
|
-
ca_certs: str | None = None
|
|
165
|
-
ca_cert_dir: str | None = None
|
|
166
|
-
ca_cert_data: None | str | bytes = None
|
|
167
|
-
cert_file: str | None
|
|
168
|
-
key_file: str | None
|
|
169
|
-
key_password: str | None
|
|
170
|
-
ssl_context: typing.Any | None
|
|
171
|
-
ssl_version: int | str | None = None
|
|
172
|
-
ssl_minimum_version: int | None = None
|
|
173
|
-
ssl_maximum_version: int | None = None
|
|
174
|
-
assert_hostname: None | str | typing.Literal[False]
|
|
175
|
-
assert_fingerprint: str | None = None
|
|
176
|
-
|
|
177
|
-
def __init__(
|
|
178
|
-
self,
|
|
179
|
-
host: str,
|
|
180
|
-
port: int = 0,
|
|
181
|
-
*,
|
|
182
|
-
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
|
|
183
|
-
source_address: tuple[str, int] | None = None,
|
|
184
|
-
blocksize: int = 16384,
|
|
185
|
-
socket_options: None
|
|
186
|
-
| _TYPE_SOCKET_OPTIONS = HTTPConnection.default_socket_options,
|
|
187
|
-
proxy: Url | None = None,
|
|
188
|
-
proxy_config: ProxyConfig | None = None,
|
|
189
|
-
cert_reqs: int | str | None = None,
|
|
190
|
-
assert_hostname: None | str | typing.Literal[False] = None,
|
|
191
|
-
assert_fingerprint: str | None = None,
|
|
192
|
-
server_hostname: str | None = None,
|
|
193
|
-
ssl_context: typing.Any | None = None,
|
|
194
|
-
ca_certs: str | None = None,
|
|
195
|
-
ca_cert_dir: str | None = None,
|
|
196
|
-
ca_cert_data: None | str | bytes = None,
|
|
197
|
-
ssl_minimum_version: int | None = None,
|
|
198
|
-
ssl_maximum_version: int | None = None,
|
|
199
|
-
ssl_version: int | str | None = None, # Deprecated
|
|
200
|
-
cert_file: str | None = None,
|
|
201
|
-
key_file: str | None = None,
|
|
202
|
-
key_password: str | None = None,
|
|
203
|
-
) -> None:
|
|
204
|
-
super().__init__(
|
|
205
|
-
host,
|
|
206
|
-
port=port,
|
|
207
|
-
timeout=timeout,
|
|
208
|
-
source_address=source_address,
|
|
209
|
-
blocksize=blocksize,
|
|
210
|
-
socket_options=socket_options,
|
|
211
|
-
proxy=proxy,
|
|
212
|
-
proxy_config=proxy_config,
|
|
213
|
-
)
|
|
214
|
-
self.scheme = "https"
|
|
215
|
-
|
|
216
|
-
self.key_file = key_file
|
|
217
|
-
self.cert_file = cert_file
|
|
218
|
-
self.key_password = key_password
|
|
219
|
-
self.ssl_context = ssl_context
|
|
220
|
-
self.server_hostname = server_hostname
|
|
221
|
-
self.assert_hostname = assert_hostname
|
|
222
|
-
self.assert_fingerprint = assert_fingerprint
|
|
223
|
-
self.ssl_version = ssl_version
|
|
224
|
-
self.ssl_minimum_version = ssl_minimum_version
|
|
225
|
-
self.ssl_maximum_version = ssl_maximum_version
|
|
226
|
-
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
|
|
227
|
-
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
|
|
228
|
-
self.ca_cert_data = ca_cert_data
|
|
229
|
-
|
|
230
|
-
self.cert_reqs = None
|
|
231
|
-
|
|
232
|
-
# The browser will automatically verify all requests.
|
|
233
|
-
# We have no control over that setting.
|
|
234
|
-
self.is_verified = True
|
|
235
|
-
|
|
236
|
-
def set_cert(
|
|
237
|
-
self,
|
|
238
|
-
key_file: str | None = None,
|
|
239
|
-
cert_file: str | None = None,
|
|
240
|
-
cert_reqs: int | str | None = None,
|
|
241
|
-
key_password: str | None = None,
|
|
242
|
-
ca_certs: str | None = None,
|
|
243
|
-
assert_hostname: None | str | typing.Literal[False] = None,
|
|
244
|
-
assert_fingerprint: str | None = None,
|
|
245
|
-
ca_cert_dir: str | None = None,
|
|
246
|
-
ca_cert_data: None | str | bytes = None,
|
|
247
|
-
) -> None:
|
|
248
|
-
pass
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
# verify that this class implements BaseHTTP(s) connection correctly
|
|
252
|
-
if typing.TYPE_CHECKING:
|
|
253
|
-
_supports_http_protocol: BaseHTTPConnection = EmscriptenHTTPConnection("", 0)
|
|
254
|
-
_supports_https_protocol: BaseHTTPSConnection = EmscriptenHTTPSConnection("", 0)
|
|
@@ -1,418 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Support for streaming http requests in emscripten.
|
|
3
|
-
|
|
4
|
-
A few caveats -
|
|
5
|
-
|
|
6
|
-
Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed.
|
|
7
|
-
Streaming only works if you're running pyodide in a web worker.
|
|
8
|
-
|
|
9
|
-
Secondly, this uses an extra web worker and SharedArrayBuffer to do the asynchronous fetch
|
|
10
|
-
operation, so it requires that you have crossOriginIsolation enabled, by serving over https
|
|
11
|
-
(or from localhost) with the two headers below set:
|
|
12
|
-
|
|
13
|
-
Cross-Origin-Opener-Policy: same-origin
|
|
14
|
-
Cross-Origin-Embedder-Policy: require-corp
|
|
15
|
-
|
|
16
|
-
You can tell if cross origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in
|
|
17
|
-
javascript console. If it isn't, streaming requests will fallback to XMLHttpRequest, i.e. getting the whole
|
|
18
|
-
request into a buffer and then returning it. it shows a warning in the javascript console in this case.
|
|
19
|
-
|
|
20
|
-
Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once
|
|
21
|
-
control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch.
|
|
22
|
-
|
|
23
|
-
NB: in this code, there are a lot of javascript objects. They are named js_*
|
|
24
|
-
to make it clear what type of object they are.
|
|
25
|
-
"""
|
|
26
|
-
from __future__ import annotations
|
|
27
|
-
|
|
28
|
-
import io
|
|
29
|
-
import json
|
|
30
|
-
from email.parser import Parser
|
|
31
|
-
from importlib.resources import files
|
|
32
|
-
from typing import TYPE_CHECKING, Any
|
|
33
|
-
|
|
34
|
-
import js # type: ignore[import-not-found]
|
|
35
|
-
from pyodide.ffi import ( # type: ignore[import-not-found]
|
|
36
|
-
JsArray,
|
|
37
|
-
JsException,
|
|
38
|
-
JsProxy,
|
|
39
|
-
to_js,
|
|
40
|
-
)
|
|
41
|
-
|
|
42
|
-
if TYPE_CHECKING:
|
|
43
|
-
from typing_extensions import Buffer
|
|
44
|
-
|
|
45
|
-
from .request import EmscriptenRequest
|
|
46
|
-
from .response import EmscriptenResponse
|
|
47
|
-
|
|
48
|
-
"""
|
|
49
|
-
There are some headers that trigger unintended CORS preflight requests.
|
|
50
|
-
See also https://github.com/koenvo/pyodide-http/issues/22
|
|
51
|
-
"""
|
|
52
|
-
HEADERS_TO_IGNORE = ("user-agent",)
|
|
53
|
-
|
|
54
|
-
SUCCESS_HEADER = -1
|
|
55
|
-
SUCCESS_EOF = -2
|
|
56
|
-
ERROR_TIMEOUT = -3
|
|
57
|
-
ERROR_EXCEPTION = -4
|
|
58
|
-
|
|
59
|
-
_STREAMING_WORKER_CODE = (
|
|
60
|
-
files(__package__)
|
|
61
|
-
.joinpath("emscripten_fetch_worker.js")
|
|
62
|
-
.read_text(encoding="utf-8")
|
|
63
|
-
)
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
class _RequestError(Exception):
|
|
67
|
-
def __init__(
|
|
68
|
-
self,
|
|
69
|
-
message: str | None = None,
|
|
70
|
-
*,
|
|
71
|
-
request: EmscriptenRequest | None = None,
|
|
72
|
-
response: EmscriptenResponse | None = None,
|
|
73
|
-
):
|
|
74
|
-
self.request = request
|
|
75
|
-
self.response = response
|
|
76
|
-
self.message = message
|
|
77
|
-
super().__init__(self.message)
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
class _StreamingError(_RequestError):
|
|
81
|
-
pass
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
class _TimeoutError(_RequestError):
|
|
85
|
-
pass
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
def _obj_from_dict(dict_val: dict[str, Any]) -> JsProxy:
|
|
89
|
-
return to_js(dict_val, dict_converter=js.Object.fromEntries)
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
class _ReadStream(io.RawIOBase):
    """Synchronous, read-only stream over a response body produced
    incrementally by the streaming-fetch web worker.

    Data is handed over through a SharedArrayBuffer: ``int_buffer`` carries
    control words (slot 0 = byte count or an ERROR_* sentinel, slot 1 = an
    auxiliary length for error strings) and ``byte_buffer`` carries payload
    bytes.  ``js.Atomics`` is used to block this thread until the worker
    has written something.
    """

    def __init__(
        self,
        int_buffer: JsArray,
        byte_buffer: JsArray,
        timeout: float,
        worker: JsProxy,
        connection_id: int,
        request: EmscriptenRequest,
    ):
        # Shared control words written by the worker.
        self.int_buffer = int_buffer
        # Shared payload bytes written by the worker.
        self.byte_buffer = byte_buffer
        # Cursor into byte_buffer for bytes not yet consumed by readinto().
        self.read_pos = 0
        # Number of unread bytes currently available in byte_buffer.
        self.read_len = 0
        # Identifies this connection in messages posted to the worker.
        self.connection_id = connection_id
        self.worker = worker
        # Atomics.wait takes milliseconds; None means wait indefinitely.
        self.timeout = int(1000 * timeout) if timeout > 0 else None
        # True while the worker still holds an open connection for us.
        self.is_live = True
        self._is_closed = False
        # Kept only for error reporting; dropped on close().
        self.request: EmscriptenRequest | None = request

    def __del__(self) -> None:
        # Best effort: make sure the worker-side connection is released.
        self.close()

    # this is compatible with _base_connection
    def is_closed(self) -> bool:
        """Return True once close() has run."""
        return self._is_closed

    # for compatibility with RawIOBase
    @property
    def closed(self) -> bool:
        return self.is_closed()

    def close(self) -> None:
        """Drop buffer references and tell the worker to close the connection.

        Idempotent; only posts the "close" message if the worker-side
        connection is still live.
        """
        if not self.is_closed():
            self.read_len = 0
            self.read_pos = 0
            self.int_buffer = None
            self.byte_buffer = None
            self._is_closed = True
            self.request = None
            if self.is_live:
                self.worker.postMessage(_obj_from_dict({"close": self.connection_id}))
                self.is_live = False
            super().close()

    def readable(self) -> bool:
        return True

    def writable(self) -> bool:
        return False

    def seekable(self) -> bool:
        return False

    def readinto(self, byte_obj: Buffer) -> int:
        """Fill *byte_obj* with response bytes and return the count (0 at EOF).

        Blocks via Atomics.wait until the worker delivers more data.
        Raises _TimeoutError when the wait times out and _StreamingError
        when the worker reports a fetch exception.
        """
        if not self.int_buffer:
            raise _StreamingError(
                "No buffer for stream in _ReadStream.readinto",
                request=self.request,
                response=None,
            )
        if self.read_len == 0:
            # wait for the worker to send something
            js.Atomics.store(self.int_buffer, 0, ERROR_TIMEOUT)
            self.worker.postMessage(_obj_from_dict({"getMore": self.connection_id}))
            if (
                js.Atomics.wait(self.int_buffer, 0, ERROR_TIMEOUT, self.timeout)
                == "timed-out"
            ):
                raise _TimeoutError
            # Worker has written: slot 0 is either a byte count or a sentinel.
            data_len = self.int_buffer[0]
            if data_len > 0:
                self.read_len = data_len
                self.read_pos = 0
            elif data_len == ERROR_EXCEPTION:
                string_len = self.int_buffer[1]
                # decode the error string
                js_decoder = js.TextDecoder.new()
                json_str = js_decoder.decode(self.byte_buffer.slice(0, string_len))
                raise _StreamingError(
                    f"Exception thrown in fetch: {json_str}",
                    request=self.request,
                    response=None,
                )
            else:
                # EOF, free the buffers and return zero
                # and free the request
                self.is_live = False
                self.close()
                return 0
        # copy from int32array to python bytes
        ret_length = min(self.read_len, len(memoryview(byte_obj)))
        # .slice would also work, but subarray avoids an extra JS-side copy;
        # .to_py() performs the actual copy into Python memory.
        subarray = self.byte_buffer.subarray(
            self.read_pos, self.read_pos + ret_length
        ).to_py()
        memoryview(byte_obj)[0:ret_length] = subarray
        self.read_len -= ret_length
        self.read_pos += ret_length
        return ret_length
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
class _StreamingFetcher:
    """Runs fetch() inside a dedicated web worker and exposes response bodies
    as synchronous ``_ReadStream`` objects.

    Handover between this thread and the worker uses a SharedArrayBuffer
    plus ``js.Atomics`` so that the (synchronous) caller can block until
    the worker has produced headers or data.
    """

    def __init__(self) -> None:
        # make web-worker and data buffer on startup
        # Flipped to True by the worker's first message (see promise_resolver).
        self.streaming_ready = False

        # The worker's JS source is shipped as a string and loaded via a
        # Blob URL, so no separate script file needs to be served.
        js_data_blob = js.Blob.new(
            [_STREAMING_WORKER_CODE], _obj_from_dict({"type": "application/javascript"})
        )

        def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None:
            # NOTE: the Promise executor runs synchronously inside
            # Promise.new below, after self.js_worker has been assigned.
            def onMsg(e: JsProxy) -> None:
                self.streaming_ready = True
                js_resolve_fn(e)

            def onErr(e: JsProxy) -> None:
                js_reject_fn(e)  # Defensive: never happens in ci

            self.js_worker.onmessage = onMsg
            self.js_worker.onerror = onErr

        js_data_url = js.URL.createObjectURL(js_data_blob)
        self.js_worker = js.globalThis.Worker.new(js_data_url)
        # Awaited by wait_for_streaming_ready(); resolves on the worker's
        # first message.
        self.js_worker_ready_promise = js.globalThis.Promise.new(promise_resolver)

    def send(self, request: EmscriptenRequest) -> EmscriptenResponse:
        """Post *request* to the worker and block until headers arrive.

        Returns an EmscriptenResponse whose body is a lazy _ReadStream.
        Raises _TimeoutError if the worker does not answer in time and
        _StreamingError on fetch errors or unknown worker status codes.
        """
        headers = {
            k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE
        }

        body = request.body
        fetch_data = {"headers": headers, "body": to_js(body), "method": request.method}
        # start the request off in the worker
        timeout = int(1000 * request.timeout) if request.timeout > 0 else None
        # 1 MiB shared scratch buffer; first 8 bytes are the two Int32
        # control words, the rest (offset 8) is the byte payload area.
        js_shared_buffer = js.SharedArrayBuffer.new(1048576)
        js_int_buffer = js.Int32Array.new(js_shared_buffer)
        js_byte_buffer = js.Uint8Array.new(js_shared_buffer, 8)

        # Prime slot 0 with the timeout sentinel so Atomics.wait below can
        # detect whether the worker has responded.
        js.Atomics.store(js_int_buffer, 0, ERROR_TIMEOUT)
        js.Atomics.notify(js_int_buffer, 0)
        # Resolve relative URLs against the current location.
        js_absolute_url = js.URL.new(request.url, js.location).href
        self.js_worker.postMessage(
            _obj_from_dict(
                {
                    "buffer": js_shared_buffer,
                    "url": js_absolute_url,
                    "fetchParams": fetch_data,
                }
            )
        )
        # wait for the worker to send something
        js.Atomics.wait(js_int_buffer, 0, ERROR_TIMEOUT, timeout)
        if js_int_buffer[0] == ERROR_TIMEOUT:
            raise _TimeoutError(
                "Timeout connecting to streaming request",
                request=request,
                response=None,
            )
        elif js_int_buffer[0] == SUCCESS_HEADER:
            # got response
            # header length is in second int of intBuffer
            string_len = js_int_buffer[1]
            # decode the rest to a JSON string
            js_decoder = js.TextDecoder.new()
            # this does a copy (the slice) because decode can't work on shared array
            # for some silly reason
            json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
            # get it as an object
            response_obj = json.loads(json_str)
            return EmscriptenResponse(
                request=request,
                status_code=response_obj["status"],
                headers=response_obj["headers"],
                # The body stream reuses the same shared buffers; the worker
                # routes follow-up data by connectionID.
                body=_ReadStream(
                    js_int_buffer,
                    js_byte_buffer,
                    request.timeout,
                    self.js_worker,
                    response_obj["connectionID"],
                    request,
                ),
            )
        elif js_int_buffer[0] == ERROR_EXCEPTION:
            string_len = js_int_buffer[1]
            # decode the error string
            js_decoder = js.TextDecoder.new()
            json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
            raise _StreamingError(
                f"Exception thrown in fetch: {json_str}", request=request, response=None
            )
        else:
            raise _StreamingError(
                f"Unknown status from worker in fetch: {js_int_buffer[0]}",
                request=request,
                response=None,
            )
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
# check if we are in a worker or not
|
|
292
|
-
# check if we are in a worker or not
def is_in_browser_main_thread() -> bool:
    """Return True when running on the browser's main (window) thread."""
    has_window = hasattr(js, "window")
    has_self = hasattr(js, "self")
    return has_window and has_self and js.self == js.window
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
def is_cross_origin_isolated() -> bool:
    """Return the page's crossOriginIsolated flag (False when absent).

    Cross-origin isolation is required for SharedArrayBuffer to be usable.
    """
    if not hasattr(js, "crossOriginIsolated"):
        return False
    return js.crossOriginIsolated
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
def is_in_node() -> bool:
    """Detect the Node.js runtime by probing ``js.process.release.name``."""
    if not hasattr(js, "process"):
        return False
    if not hasattr(js.process, "release"):
        return False
    if not hasattr(js.process.release, "name"):
        return False
    return js.process.release.name == "node"
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
def is_worker_available() -> bool:
    """Return True when both Worker and Blob constructors exist here."""
    return all(hasattr(js, attr) for attr in ("Worker", "Blob"))
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
# Module-level singleton: the streaming fetcher, created only when the
# environment can support it — workers available, page cross-origin
# isolated, not on the main browser thread, and not running under Node.
_fetcher: _StreamingFetcher | None = None

if (
    is_worker_available()
    and is_cross_origin_isolated()
    and not is_in_browser_main_thread()
    and not is_in_node()
):
    _fetcher = _StreamingFetcher()
# (no else branch needed: _fetcher is already None)
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None:
    """Dispatch *request* via the streaming worker.

    Returns None (after logging a console warning explaining why) when
    streaming is unavailable or the worker is not ready yet.
    """
    if not (_fetcher and streaming_ready()):
        _show_streaming_warning()
        return None
    return _fetcher.send(request)
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
# Module-level flag: ensures the main-thread timeout warning prints at most once.
_SHOWN_TIMEOUT_WARNING = False
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
def _show_timeout_warning() -> None:
    """Warn once per session that timeouts don't work on the main thread."""
    global _SHOWN_TIMEOUT_WARNING
    if _SHOWN_TIMEOUT_WARNING:
        return
    _SHOWN_TIMEOUT_WARNING = True
    js.console.warn("Warning: Timeout is not available on main browser thread")
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
# Module-level flag: ensures the "can't stream" warning prints at most once.
_SHOWN_STREAMING_WARNING = False
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
def _show_streaming_warning() -> None:
    """Explain once, via console.warn, why streaming requests are disabled."""
    global _SHOWN_STREAMING_WARNING
    if _SHOWN_STREAMING_WARNING:
        return
    _SHOWN_STREAMING_WARNING = True
    # Collect every applicable reason, then emit a single warning.
    parts = ["Can't stream HTTP requests because: \n"]
    if not is_cross_origin_isolated():
        parts.append(" Page is not cross-origin isolated\n")
    if is_in_browser_main_thread():
        parts.append(" Python is running in main browser thread\n")
    if not is_worker_available():
        # Defensive: this is always False in browsers that we test in
        parts.append(" Worker or Blob classes are not available in this environment.")
    if streaming_ready() is False:
        parts.append(
            """ Streaming fetch worker isn't ready. If you want to be sure that streaming fetch
is working, you need to call: 'await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()`"""
        )
    from js import console

    console.warn("".join(parts))
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
def send_request(request: EmscriptenRequest) -> EmscriptenResponse:
    """Send *request* with a synchronous XMLHttpRequest and return the response.

    Non-streaming fallback path.  In a worker the body arrives as an
    ArrayBuffer; on the main browser thread a charset override is used so
    the text response can be re-encoded back into raw bytes below.

    Raises:
        _TimeoutError: when the JS side reports "TimeoutError".
        _RequestError: for network and other JS-side request failures.
    """
    try:
        js_xhr = js.XMLHttpRequest.new()

        if not is_in_browser_main_thread():
            # Workers support binary responses and real timeouts.
            js_xhr.responseType = "arraybuffer"
            if request.timeout:
                js_xhr.timeout = int(request.timeout * 1000)
        else:
            # Main thread: override the charset so the response text can be
            # recovered as bytes via .encode("ISO-8859-15") further down.
            js_xhr.overrideMimeType("text/plain; charset=ISO-8859-15")
            if request.timeout:
                # timeout isn't available on the main thread - show a warning in console
                # if it is set
                _show_timeout_warning()

        # Third argument False = synchronous request.
        js_xhr.open(request.method, request.url, False)
        for name, value in request.headers.items():
            if name.lower() not in HEADERS_TO_IGNORE:
                js_xhr.setRequestHeader(name, value)

        js_xhr.send(to_js(request.body))

        # getAllResponseHeaders() yields RFC822-style "Name: value" lines,
        # which email.parser.Parser turns into a mapping.
        headers = dict(Parser().parsestr(js_xhr.getAllResponseHeaders()))

        if not is_in_browser_main_thread():
            # ArrayBuffer -> memoryview -> bytes.
            body = js_xhr.response.to_py().tobytes()
        else:
            # Re-encode the charset-overridden text back into raw bytes.
            body = js_xhr.response.encode("ISO-8859-15")
        return EmscriptenResponse(
            status_code=js_xhr.status, headers=headers, body=body, request=request
        )
    except JsException as err:
        if err.name == "TimeoutError":
            raise _TimeoutError(err.message, request=request)
        elif err.name == "NetworkError":
            raise _RequestError(err.message, request=request)
        else:
            # general http error
            raise _RequestError(err.message, request=request)
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
def streaming_ready() -> bool | None:
    """Return the worker's readiness flag, or None when streaming is unsupported."""
    if _fetcher is None:
        # no fetcher, return None to signify that
        return None
    return _fetcher.streaming_ready
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
async def wait_for_streaming_ready() -> bool:
    """Await the worker's ready handshake; False when streaming is unsupported."""
    if _fetcher is None:
        return False
    await _fetcher.js_worker_ready_promise
    return True
|