py2docfx 0.1.18.dev2146624__py3-none-any.whl → 0.1.18.dev2159003__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2docfx/docfx_yaml/build_finished.py +13 -3
- py2docfx/venv/basevenv/Lib/site-packages/certifi/__init__.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/core/_version.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/core/credentials.py +6 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/core/credentials_async.py +10 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/core/paging.py +13 -0
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/__init__.py +5 -5
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_authentication.py +11 -3
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_authentication_async.py +1 -3
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_distributed_tracing.py +19 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_retry.py +1 -0
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_universal.py +21 -4
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_aiohttp.py +17 -3
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_base.py +2 -0
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_requests_asyncio.py +6 -6
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_requests_basic.py +7 -5
- py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_requests_trio.py +6 -6
- py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/_async_poller.py +57 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/_poller.py +74 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/async_base_polling.py +3 -3
- py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/base_polling.py +24 -7
- py2docfx/venv/venv1/Lib/site-packages/azure/core/serialization.py +156 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/core/tracing/opentelemetry.py +24 -2
- py2docfx/venv/venv1/Lib/site-packages/certifi/__init__.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/cryptography/__about__.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/typing_extensions.py +49 -43
- {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18.dev2159003.dist-info}/METADATA +1 -1
- {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18.dev2159003.dist-info}/RECORD +30 -30
- {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18.dev2159003.dist-info}/WHEEL +0 -0
- {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18.dev2159003.dist-info}/top_level.txt +0 -0
@@ -30,6 +30,16 @@ def string_representer(dumper, data):
 yml.add_representer(str, string_representer)

 def insert_node_to_toc_tree_return_is_root_package(toc_yaml, uid, project_name, toc_node_map):
+    def generate_toc_node(name, uid):
+        """
+        Generate a TOC node with the given name and uid.
+        """
+        return {
+            'name': name,
+            'uid': uid,
+            'no-loc': [name],
+        }
+
     # Build nested TOC
     parent_level = uid
     cur_node = None
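Note: the hunks below replace two inline dict literals with calls to this new helper. A minimal standalone sketch of what `generate_toc_node` produces and how the two call sites use it (the example names and uids are hypothetical, not taken from the package):

def generate_toc_node(name, uid):
    """Generate a TOC node with the given name and uid."""
    return {
        'name': name,
        'uid': uid,
        'no-loc': [name],
    }

# A leaf node named after the last dotted segment, and a root package node.
leaf = generate_toc_node('blob', 'azure.storage.blob')   # hypothetical uid
root = generate_toc_node('azure-mypkg', 'azure-mypkg')   # hypothetical package
assert leaf['no-loc'] == ['blob'] and root['name'] == root['uid']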
@@ -41,7 +51,7 @@ def insert_node_to_toc_tree_return_is_root_package(toc_yaml, uid, project_name,
         if found_node:
             # If ancestor already in current TOC, insert to its items
             name = uid.split('.')[-1] if '.' in uid and project_name != uid else uid
-            cur_node =
+            cur_node = generate_toc_node(name, uid)
             if 'uid' in found_node:
                 # Only leaf nodes should have uid
                 found_node.pop('uid', 'No uid found')
@@ -51,8 +61,8 @@ def insert_node_to_toc_tree_return_is_root_package(toc_yaml, uid, project_name,

     # uid is representing a package in TOC as root node
     if cur_node is None:
-        # if uid doesn't contain '.', the name
-        cur_node =
+        # if uid doesn't contain '.', the name doesn't need to be simplified
+        cur_node = generate_toc_node(uid, uid)
         toc_yaml.append(cur_node)
         is_root = True

@@ -116,7 +116,11 @@ class SupportsTokenInfo(Protocol, ContextManager["SupportsTokenInfo"]):
         ...

     def close(self) -> None:
-
+        """Close the credential, releasing any resources it holds.
+
+        :return: None
+        :rtype: None
+        """


 TokenProvider = Union[TokenCredential, SupportsTokenInfo]
@@ -171,7 +175,7 @@ class AzureKeyCredential:
         to update long-lived clients.

         :param str key: The key used to authenticate to an Azure service
-        :raises
+        :raises ValueError or TypeError: If the key is None, empty, or not a string.
         """
         if not key:
             raise ValueError("The key used for updating can not be None or empty")
@@ -39,7 +39,11 @@ class AsyncTokenCredential(Protocol, AsyncContextManager["AsyncTokenCredential"]
         ...

     async def close(self) -> None:
-
+        """Close the credential, releasing any resources.
+
+        :return: None
+        :rtype: None
+        """

     async def __aexit__(
         self,
@@ -70,7 +74,11 @@ class AsyncSupportsTokenInfo(Protocol, AsyncContextManager["AsyncSupportsTokenIn
         ...

     async def close(self) -> None:
-
+        """Close the credential, releasing any resources.
+
+        :return: None
+        :rtype: None
+        """

     async def __aexit__(
         self,
@@ -69,6 +69,13 @@ class PageIterator(Iterator[Iterator[ReturnType]]):
         return self

     def __next__(self) -> Iterator[ReturnType]:
+        """Get the next page in the iterator.
+
+        :returns: An iterator of objects in the next page.
+        :rtype: iterator[ReturnType]
+        :raises StopIteration: If there are no more pages to return.
+        :raises AzureError: If the request fails.
+        """
         if self.continuation_token is None and self._did_a_call_already:
             raise StopIteration("End of paging")
         try:
@@ -118,6 +125,12 @@ class ItemPaged(Iterator[ReturnType]):
         return self

     def __next__(self) -> ReturnType:
+        """Get the next item in the iterator.
+
+        :returns: The next item in the iterator.
+        :rtype: ReturnType
+        :raises StopIteration: If there are no more items to return.
+        """
         if self._page_iterator is None:
             self._page_iterator = itertools.chain.from_iterable(self.by_page())
         return next(self._page_iterator)
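The added docstrings describe the existing iteration contract: iterating an `ItemPaged` lazily flattens the pages produced by `by_page()` into a stream of single items. A minimal sketch of that pattern in generic Python (not the azure.core implementation; `TinyItemPaged` and the sample pages are invented for illustration):

import itertools

class TinyItemPaged:
    """Sketch of the flattening pattern used by ItemPaged.__next__ above."""

    def __init__(self, pages):
        self._pages = pages          # e.g. [[1, 2], [3]], standing in for a PageIterator
        self._page_iterator = None

    def by_page(self):
        return iter(self._pages)

    def __iter__(self):
        return self

    def __next__(self):
        # Build the flattened iterator on first use, exactly as in the diff above.
        if self._page_iterator is None:
            self._page_iterator = itertools.chain.from_iterable(self.by_page())
        return next(self._page_iterator)

assert list(TinyItemPaged([[1, 2], [3]])) == [1, 2, 3]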
@@ -107,18 +107,18 @@ class PipelineContext(Dict[str, Any]):
     def clear(  # pylint: disable=docstring-missing-return, docstring-missing-rtype
         self,
     ) -> None:
-        """
+        """Clears the context objects.

-        :raises:
+        :raises TypeError: If context objects cannot be cleared
         """
         raise TypeError("Context objects cannot be cleared.")

     def update(  # pylint: disable=docstring-missing-return, docstring-missing-rtype, docstring-missing-param
         self, *args: Any, **kwargs: Any
     ) -> None:
-        """
+        """Updates the context objects.

-        :raises:
+        :raises TypeError: If context objects cannot be updated
         """
         raise TypeError("Context objects cannot be updated.")

@@ -135,7 +135,7 @@ class PipelineContext(Dict[str, Any]):
         :type args: str
         :return: The value for this key.
         :rtype: any
-        :raises
+        :raises ValueError: If the key is in the protected list.
         """
         if args and args[0] in self._protected:
             raise ValueError("Context value {} cannot be popped.".format(args[0]))
@@ -204,9 +204,7 @@ class BearerTokenCredentialPolicy(_BearerTokenCredentialPolicyBase, HTTPPolicy[H
                 padding_needed = -len(encoded_claims) % 4
                 claims = base64.urlsafe_b64decode(encoded_claims + "=" * padding_needed).decode("utf-8")
                 if claims:
-
-                    bearer_token = cast(Union["AccessToken", "AccessTokenInfo"], token).token
-                    request.http_request.headers["Authorization"] = "Bearer " + bearer_token
+                    self.authorize_request(request, *self._scopes, claims=claims)
                     return True
         except Exception:  # pylint:disable=broad-except
             return False
@@ -268,6 +266,11 @@ class AzureKeyCredentialPolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseTyp
         self._prefix = prefix + " " if prefix else ""

     def on_request(self, request: PipelineRequest[HTTPRequestType]) -> None:
+        """Called before the policy sends a request.
+
+        :param request: The request to be modified before sending.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
         request.http_request.headers[self._name] = f"{self._prefix}{self._credential.key}"


@@ -290,6 +293,11 @@ class AzureSasCredentialPolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseTyp
         self._credential = credential

     def on_request(self, request: PipelineRequest[HTTPRequestType]) -> None:
+        """Called before the policy sends a request.
+
+        :param request: The request to be modified before sending.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
         url = request.http_request.url
         query = request.http_request.query
         signature = self._credential.signature
@@ -149,9 +149,7 @@ class AsyncBearerTokenCredentialPolicy(AsyncHTTPPolicy[HTTPRequestType, AsyncHTT
                 padding_needed = -len(encoded_claims) % 4
                 claims = base64.urlsafe_b64decode(encoded_claims + "=" * padding_needed).decode("utf-8")
                 if claims:
-
-                    bearer_token = cast(Union["AccessToken", "AccessTokenInfo"], token).token
-                    request.http_request.headers["Authorization"] = "Bearer " + bearer_token
+                    await self.authorize_request(request, *self._scopes, claims=claims)
                     return True
         except Exception:  # pylint:disable=broad-except
             return False
@@ -101,6 +101,11 @@ class DistributedTracingPolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseTyp
         self._instrumentation_config = instrumentation_config

     def on_request(self, request: PipelineRequest[HTTPRequestType]) -> None:
+        """Starts a span for the network call.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
         ctxt = request.context.options
         try:
             tracing_options: TracingOptions = ctxt.pop("tracing_options", {})
@@ -161,8 +166,8 @@ class DistributedTracingPolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseTyp
                 token = tracer._suppress_auto_http_instrumentation()  # pylint: disable=protected-access
                 request.context[self._SUPPRESSION_TOKEN] = token

-        except Exception
-            _LOGGER.warning("Unable to start network span
+        except Exception:  # pylint: disable=broad-except
+            _LOGGER.warning("Unable to start network span.")

     def end_span(
         self,
@@ -234,9 +239,21 @@ class DistributedTracingPolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseTyp
         request: PipelineRequest[HTTPRequestType],
         response: PipelineResponse[HTTPRequestType, HTTPResponseType],
     ) -> None:
+        """Ends the span for the network call and updates its status.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: The PipelineResponse object
+        :type response: ~azure.core.pipeline.PipelineResponse
+        """
         self.end_span(request, response=response.http_response)

     def on_exception(self, request: PipelineRequest[HTTPRequestType]) -> None:
+        """Ends the span for the network call and updates its status with exception info.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
         self.end_span(request, exc_info=sys.exc_info())

     def _set_http_client_span_attributes(
@@ -510,14 +510,21 @@ class HttpLoggingPolicy(
                 log_string += "\nNo body was attached to the request"
             logger.info(log_string)

-        except Exception
-            logger.warning("Failed to log request
+        except Exception:  # pylint: disable=broad-except
+            logger.warning("Failed to log request.")

     def on_response(
         self,
         request: PipelineRequest[HTTPRequestType],
         response: PipelineResponse[HTTPRequestType, HTTPResponseType],
     ) -> None:
+        """Logs HTTP response status and headers.
+
+        :param request: The PipelineRequest object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: The PipelineResponse object.
+        :type response: ~azure.core.pipeline.PipelineResponse
+        """
         http_response = response.http_response

         # Get logger in my context first (request has been retried)
@@ -545,8 +552,8 @@ class HttpLoggingPolicy(
                     value = self._redact_header(res_header, value)
                     log_string += "\n '{}': '{}'".format(res_header, value)
             logger.info(log_string)
-        except Exception
-            logger.warning("Failed to log response
+        except Exception:  # pylint: disable=broad-except
+            logger.warning("Failed to log response.")


 class ContentDecodePolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseType]):
@@ -683,6 +690,11 @@ class ContentDecodePolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseType]):
             return cls.deserialize_from_text(response.text(encoding), mime_type, response=response)

     def on_request(self, request: PipelineRequest[HTTPRequestType]) -> None:
+        """Set the response encoding in the request context.
+
+        :param request: The PipelineRequest object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
         options = request.context.options
         response_encoding = options.pop("response_encoding", self._response_encoding)
         if response_encoding:
@@ -743,6 +755,11 @@ class ProxyPolicy(SansIOHTTPPolicy[HTTPRequestType, HTTPResponseType]):
         self.proxies = proxies

     def on_request(self, request: PipelineRequest[HTTPRequestType]) -> None:
+        """Adds the proxy information to the request context.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
         ctxt = request.context.options
         if self.proxies and "proxies" not in ctxt:
             ctxt["proxies"] = self.proxies
@@ -49,7 +49,9 @@ from multidict import CIMultiDict
 from azure.core.configuration import ConnectionConfiguration
 from azure.core.exceptions import (
     ServiceRequestError,
+    ServiceRequestTimeoutError,
     ServiceResponseError,
+    ServiceResponseTimeoutError,
     IncompleteReadError,
 )
 from azure.core.pipeline import AsyncPipeline
@@ -76,6 +78,15 @@ if TYPE_CHECKING:
 CONTENT_CHUNK_SIZE = 10 * 1024
 _LOGGER = logging.getLogger(__name__)

+try:
+    # ConnectionTimeoutError was only introduced in aiohttp 3.10 so we want to keep this
+    # backwards compatible. If client is using aiohttp <3.10, the behaviour will safely
+    # fall back to treating a TimeoutError as a ServiceResponseError (that wont be retried).
+    from aiohttp.client_exceptions import ConnectionTimeoutError
+except ImportError:
+
+    class ConnectionTimeoutError(Exception): ...  # type: ignore[no-redef]
+

 class AioHttpTransport(AsyncHttpTransport):
     """AioHttp HTTP sender implementation.
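The guarded import above is a common version-compatibility pattern: import the newer exception type when available, otherwise define a local stub so a later `except ConnectionTimeoutError` clause still compiles and is simply never hit on older aiohttp releases. The same pattern in isolation (a generic sketch; `some_lib` and `NewSpecificError` are hypothetical names):

# Generic sketch of the optional-import fallback used above.
try:
    from some_lib.errors import NewSpecificError  # hypothetical; only exists in newer releases
except ImportError:
    class NewSpecificError(Exception):
        """Placeholder so 'except NewSpecificError' stays valid; never raised on old versions."""

def do_call(fn):
    try:
        return fn()
    except NewSpecificError:
        # Reached only when the newer library actually raises this specific type.
        return "specific handling"
    except TimeoutError:
        # Older library versions fall through to the broader handler instead.
        return "generic handling"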
@@ -130,6 +141,7 @@ class AioHttpTransport(AsyncHttpTransport):
             await self.close()

     async def open(self):
+        """Opens the connection."""
         if self._has_been_opened and not self.session:
             raise ValueError(
                 "HTTP transport has already been closed. "
@@ -344,8 +356,10 @@ class AioHttpTransport(AsyncHttpTransport):
             raise
         except aiohttp.client_exceptions.ClientResponseError as err:
             raise ServiceResponseError(err, error=err) from err
+        except ConnectionTimeoutError as err:
+            raise ServiceRequestTimeoutError(err, error=err) from err
         except asyncio.TimeoutError as err:
-            raise
+            raise ServiceResponseTimeoutError(err, error=err) from err
         except aiohttp.client_exceptions.ClientError as err:
             raise ServiceRequestError(err, error=err) from err
         return response
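With this change a connect-phase timeout (aiohttp's `ConnectionTimeoutError`, checked before the broader `asyncio.TimeoutError` clause) surfaces as `ServiceRequestTimeoutError`, while other timeouts become `ServiceResponseTimeoutError`; per the comment in the import hunk above, the fallback on aiohttp <3.10 keeps the old ServiceResponseError behaviour. A hedged sketch of caller-side handling (the exception classes are real azure.core.exceptions types; `send_request` and the function itself are illustrative):

from azure.core.exceptions import (
    ServiceRequestTimeoutError,
    ServiceResponseTimeoutError,
)

async def fetch_with_reporting(client, request):
    # Sketch only: distinguishes the two timeout categories surfaced by the transport.
    try:
        return await client.send_request(request)  # hypothetical client call
    except ServiceRequestTimeoutError:
        # Connect-phase timeout: the request likely never reached the service.
        raise
    except ServiceResponseTimeoutError:
        # Response-phase timeout: the service may have received the request.
        raise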
@@ -425,7 +439,7 @@ class AioHttpStreamDownloadGenerator(AsyncIterator):
         except aiohttp.client_exceptions.ClientPayloadError as err:
             # This is the case that server closes connection before we finish the reading. aiohttp library
             # raises ClientPayloadError.
-            _LOGGER.warning("Incomplete download
+            _LOGGER.warning("Incomplete download.")
             internal_response.close()
             raise IncompleteReadError(err, error=err) from err
         except aiohttp.client_exceptions.ClientResponseError as err:
@@ -435,7 +449,7 @@ class AioHttpStreamDownloadGenerator(AsyncIterator):
         except aiohttp.client_exceptions.ClientError as err:
             raise ServiceRequestError(err, error=err) from err
         except Exception as err:
-            _LOGGER.warning("Unable to stream download
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise

@@ -507,6 +507,8 @@ class _HttpResponseBase:


 class HttpResponse(_HttpResponseBase):
+    """Represent a HTTP response."""
+
     def stream_download(self, pipeline: Pipeline[HttpRequest, "HttpResponse"], **kwargs: Any) -> Iterator[bytes]:
         """Generator for streaming request body data.

@@ -194,10 +194,10 @@ class AsyncioRequestsTransport(RequestsAsyncTransportBase):
         except requests.exceptions.ChunkedEncodingError as err:
             msg = err.__str__()
             if "IncompleteRead" in msg:
-                _LOGGER.warning("Incomplete download
+                _LOGGER.warning("Incomplete download.")
                 error = IncompleteReadError(err, error=err)
             else:
-                _LOGGER.warning("Unable to stream download
+                _LOGGER.warning("Unable to stream download.")
                 error = HttpResponseError(err, error=err)
         except requests.RequestException as err:
             error = ServiceRequestError(err, error=err)
@@ -270,14 +270,14 @@ class AsyncioStreamDownloadGenerator(AsyncIterator):
         except requests.exceptions.ChunkedEncodingError as err:
             msg = err.__str__()
             if "IncompleteRead" in msg:
-                _LOGGER.warning("Incomplete download
+                _LOGGER.warning("Incomplete download.")
                 internal_response.close()
                 raise IncompleteReadError(err, error=err) from err
-            _LOGGER.warning("Unable to stream download
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise HttpResponseError(err, error=err) from err
-        except Exception
-            _LOGGER.warning("Unable to stream download
+        except Exception:
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise

@@ -196,14 +196,14 @@ class StreamDownloadGenerator:
         except requests.exceptions.ChunkedEncodingError as err:
             msg = err.__str__()
             if "IncompleteRead" in msg:
-                _LOGGER.warning("Incomplete download
+                _LOGGER.warning("Incomplete download.")
                 internal_response.close()
                 raise IncompleteReadError(err, error=err) from err
-            _LOGGER.warning("Unable to stream download
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise HttpResponseError(err, error=err) from err
         except Exception as err:
-            _LOGGER.warning("Unable to stream download
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise

@@ -282,6 +282,7 @@ class RequestsTransport(HttpTransport):
             session.mount(p, adapter)

     def open(self):
+        """Opens the connection."""
         if self._has_been_opened and not self.session:
             raise ValueError(
                 "HTTP transport has already been closed. "
@@ -297,6 +298,7 @@ class RequestsTransport(HttpTransport):
         self._has_been_opened = True

     def close(self):
+        """Closes the connection."""
         if self._session_owner and self.session:
             self.session.close()
             self.session = None
@@ -397,10 +399,10 @@ class RequestsTransport(HttpTransport):
         except requests.exceptions.ChunkedEncodingError as err:
             msg = err.__str__()
             if "IncompleteRead" in msg:
-                _LOGGER.warning("Incomplete download
+                _LOGGER.warning("Incomplete download.")
                 error = IncompleteReadError(err, error=err)
             else:
-                _LOGGER.warning("Unable to stream download
+                _LOGGER.warning("Unable to stream download.")
                 error = HttpResponseError(err, error=err)
         except requests.RequestException as err:
             error = ServiceRequestError(err, error=err)
@@ -133,14 +133,14 @@ class TrioStreamDownloadGenerator(AsyncIterator):
         except requests.exceptions.ChunkedEncodingError as err:
             msg = err.__str__()
             if "IncompleteRead" in msg:
-                _LOGGER.warning("Incomplete download
+                _LOGGER.warning("Incomplete download.")
                 internal_response.close()
                 raise IncompleteReadError(err, error=err) from err
-            _LOGGER.warning("Unable to stream download
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise HttpResponseError(err, error=err) from err
-        except Exception
-            _LOGGER.warning("Unable to stream download
+        except Exception:
+            _LOGGER.warning("Unable to stream download.")
             internal_response.close()
             raise

@@ -286,10 +286,10 @@ class TrioRequestsTransport(RequestsAsyncTransportBase):
         except requests.exceptions.ChunkedEncodingError as err:
             msg = err.__str__()
             if "IncompleteRead" in msg:
-                _LOGGER.warning("Incomplete download
+                _LOGGER.warning("Incomplete download.")
                 error = IncompleteReadError(err, error=err)
             else:
-                _LOGGER.warning("Unable to stream download
+                _LOGGER.warning("Unable to stream download.")
                 error = HttpResponseError(err, error=err)
         except requests.RequestException as err:
             error = ServiceRequestError(err, error=err)
@@ -45,27 +45,72 @@ class AsyncPollingMethod(Generic[PollingReturnType_co]):
         initial_response: Any,
         deserialization_callback: DeserializationCallbackType,
     ) -> None:
+        """Initialize the polling method with the client, initial response, and deserialization callback.
+
+        :param client: A pipeline service client.
+        :type client: ~azure.core.PipelineClient
+        :param initial_response: The initial call response.
+        :type initial_response: ~azure.core.pipeline.PipelineResponse
+        :param deserialization_callback: A callback that takes a Response and returns a deserialized object.
+            If a subclass of Model is given, this passes "deserialize" as callback.
+        :type deserialization_callback: callable or msrest.serialization.Model
+        :return: None
+        :rtype: None
+        """
         raise NotImplementedError("This method needs to be implemented")

     async def run(self) -> None:
+        """Run the polling method.
+        This method should be overridden to implement the polling logic.
+
+        :return: None
+        :rtype: None
+        """
         raise NotImplementedError("This method needs to be implemented")

     def status(self) -> str:
+        """Return the current status of the polling operation.
+
+        :returns: The current status string.
+        :rtype: str
+        """
         raise NotImplementedError("This method needs to be implemented")

     def finished(self) -> bool:
+        """Check if the polling operation is finished.
+
+        :returns: True if the polling operation is finished, False otherwise.
+        :rtype: bool
+        """
         raise NotImplementedError("This method needs to be implemented")

     def resource(self) -> PollingReturnType_co:
+        """Return the resource of the long running operation.
+
+        :returns: The deserialized resource of the long running operation, if one is available.
+        :rtype: any
+        """
         raise NotImplementedError("This method needs to be implemented")

     def get_continuation_token(self) -> str:
+        """Return a continuation token that allows to restart the poller later.
+
+        :returns: An opaque continuation token
+        :rtype: str
+        """
         raise TypeError("Polling method '{}' doesn't support get_continuation_token".format(self.__class__.__name__))

     @classmethod
     def from_continuation_token(
         cls, continuation_token: str, **kwargs: Any
     ) -> Tuple[Any, Any, DeserializationCallbackType]:
+        """Create a poller from a continuation token.
+
+        :param continuation_token: An opaque continuation token
+        :type continuation_token: str
+        :return: A tuple containing the client, initial response, and deserialization callback.
+        :rtype: Tuple[Any, Any, DeserializationCallbackType]
+        """
         raise TypeError("Polling method '{}' doesn't support from_continuation_token".format(cls.__name__))


@@ -157,6 +202,16 @@ class AsyncLROPoller(Generic[PollingReturnType_co], Awaitable[PollingReturnType_
     def from_continuation_token(
         cls, polling_method: AsyncPollingMethod[PollingReturnType_co], continuation_token: str, **kwargs: Any
     ) -> "AsyncLROPoller[PollingReturnType_co]":
+        """Create a poller from a continuation token.
+
+        :param polling_method: The polling strategy to adopt
+        :type polling_method: ~azure.core.polling.AsyncPollingMethod
+        :param continuation_token: An opaque continuation token
+        :type continuation_token: str
+        :return: An instance of AsyncLROPoller
+        :rtype: ~azure.core.polling.AsyncLROPoller
+        :raises ~azure.core.exceptions.HttpResponseError: If the continuation token is invalid.
+        """
         (
             client,
             initial_response,
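The new `from_continuation_token` docstring documents the existing rehydration flow: a poller serializes its state with `continuation_token()` and can later be rebuilt from that opaque string. A hedged sketch of the round trip, following the signatures shown in this diff (variable names are illustrative; per the AsyncPollingMethod docstring above, concrete polling methods may require extra kwargs such as the client and deserialization callback):

from azure.core.polling import AsyncLROPoller

async def suspend_and_resume(poller: AsyncLROPoller, polling_method, **kwargs):
    # Persist an opaque token describing the in-flight long running operation.
    token = poller.continuation_token()

    # ...later, possibly elsewhere, rebuild the poller and wait for completion.
    resumed = AsyncLROPoller.from_continuation_token(
        polling_method=polling_method,
        continuation_token=token,
        **kwargs,  # e.g. client / deserialization_callback, if the method needs them
    )
    return await resumed.result()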
@@ -196,8 +251,8 @@ class AsyncLROPoller(Generic[PollingReturnType_co], Awaitable[PollingReturnType_
             if not error.continuation_token:
                 try:
                     error.continuation_token = self.continuation_token()
-                except Exception
-                    _LOGGER.warning("Unable to retrieve continuation token
+                except Exception:  # pylint: disable=broad-except
+                    _LOGGER.warning("Unable to retrieve continuation token.")
                     error.continuation_token = None
             raise
         self._done = True