azure-storage-blob 12.26.0b1__py3-none-any.whl → 12.27.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- azure/storage/blob/__init__.py +6 -5
- azure/storage/blob/_blob_client.py +59 -38
- azure/storage/blob/_blob_client.pyi +780 -0
- azure/storage/blob/_blob_client_helpers.py +4 -3
- azure/storage/blob/_blob_service_client.py +57 -17
- azure/storage/blob/_blob_service_client.pyi +182 -0
- azure/storage/blob/_container_client.py +47 -22
- azure/storage/blob/_container_client.pyi +380 -0
- azure/storage/blob/_deserialize.py +1 -1
- azure/storage/blob/_download.py +7 -7
- azure/storage/blob/_encryption.py +177 -184
- azure/storage/blob/_generated/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/_configuration.py +2 -2
- azure/storage/blob/_generated/_utils/__init__.py +6 -0
- azure/storage/blob/_generated/{_serialization.py → _utils/serialization.py} +4 -22
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/aio/_configuration.py +2 -2
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +6 -10
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +35 -39
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -13
- azure/storage/blob/_generated/aio/operations/_container_operations.py +20 -24
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +13 -17
- azure/storage/blob/_generated/aio/operations/_service_operations.py +10 -14
- azure/storage/blob/_generated/models/_models_py3.py +30 -9
- azure/storage/blob/_generated/operations/_append_blob_operations.py +11 -15
- azure/storage/blob/_generated/operations/_blob_operations.py +60 -64
- azure/storage/blob/_generated/operations/_block_blob_operations.py +16 -20
- azure/storage/blob/_generated/operations/_container_operations.py +39 -43
- azure/storage/blob/_generated/operations/_page_blob_operations.py +23 -27
- azure/storage/blob/_generated/operations/_service_operations.py +19 -23
- azure/storage/blob/_lease.py +3 -2
- azure/storage/blob/_lease.pyi +81 -0
- azure/storage/blob/_list_blobs_helper.py +1 -1
- azure/storage/blob/_quick_query_helper.py +3 -3
- azure/storage/blob/_serialize.py +1 -0
- azure/storage/blob/_shared/__init__.py +7 -7
- azure/storage/blob/_shared/authentication.py +49 -32
- azure/storage/blob/_shared/avro/avro_io.py +44 -42
- azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
- azure/storage/blob/_shared/avro/datafile.py +24 -21
- azure/storage/blob/_shared/avro/datafile_async.py +15 -15
- azure/storage/blob/_shared/avro/schema.py +196 -217
- azure/storage/blob/_shared/base_client.py +79 -70
- azure/storage/blob/_shared/base_client_async.py +53 -68
- azure/storage/blob/_shared/constants.py +1 -1
- azure/storage/blob/_shared/models.py +94 -92
- azure/storage/blob/_shared/parser.py +3 -3
- azure/storage/blob/_shared/policies.py +186 -147
- azure/storage/blob/_shared/policies_async.py +58 -69
- azure/storage/blob/_shared/request_handlers.py +50 -45
- azure/storage/blob/_shared/response_handlers.py +54 -45
- azure/storage/blob/_shared/shared_access_signature.py +65 -73
- azure/storage/blob/_shared/uploads.py +56 -49
- azure/storage/blob/_shared/uploads_async.py +70 -58
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +8 -10
- azure/storage/blob/aio/_blob_client_async.py +81 -48
- azure/storage/blob/aio/_blob_client_async.pyi +763 -0
- azure/storage/blob/aio/_blob_service_client_async.py +54 -15
- azure/storage/blob/aio/_blob_service_client_async.pyi +187 -0
- azure/storage/blob/aio/_container_client_async.py +55 -26
- azure/storage/blob/aio/_container_client_async.pyi +384 -0
- azure/storage/blob/aio/_download_async.py +15 -11
- azure/storage/blob/aio/_lease_async.py +3 -2
- azure/storage/blob/aio/_lease_async.pyi +81 -0
- azure/storage/blob/aio/_quick_query_helper_async.py +3 -3
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info}/METADATA +18 -6
- azure_storage_blob-12.27.0.dist-info/RECORD +94 -0
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info}/WHEEL +1 -1
- azure_storage_blob-12.26.0b1.dist-info/RECORD +0 -85
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info/licenses}/LICENSE +0 -0
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info}/top_level.txt +0 -0
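The `_version.py` and wheel metadata entries above reflect the version bump from 12.26.0b1 to 12.27.0. As a quick sanity check after upgrading, the installed build can be read with the standard library (a minimal sketch; the distribution name is the one shown above):

```python
# Minimal sketch: confirm which azure-storage-blob build is installed locally.
from importlib.metadata import version

print(version("azure-storage-blob"))  # expected to print "12.27.0" after upgrading
```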
azure/storage/blob/_shared/base_client.py

@@ -20,7 +20,10 @@ from urllib.parse import parse_qs, quote
 from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential, TokenCredential
 from azure.core.exceptions import HttpResponseError
 from azure.core.pipeline import Pipeline
-from azure.core.pipeline.transport import
+from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import, no-name-in-module
+    HttpTransport,
+    RequestsTransport,
+)
 from azure.core.pipeline.policies import (
     AzureSasCredentialPolicy,
     ContentDecodePolicy,
@@ -73,8 +76,17 @@ class StorageAccountHostsMixin(object):
         self,
         parsed_url: Any,
         service: str,
-        credential: Optional[
-
+        credential: Optional[
+            Union[
+                str,
+                Dict[str, str],
+                AzureNamedKeyCredential,
+                AzureSasCredential,
+                "AsyncTokenCredential",
+                TokenCredential,
+            ]
+        ] = None,
+        **kwargs: Any,
     ) -> None:
         self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY)
         self._hosts = kwargs.get("_hosts", {})
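The hunk above widens the `credential` annotation on the shared client base to spell out every accepted credential kind; it is a typing change only, but it documents what the public clients already accept at construction time. A hedged sketch (endpoint, key, and SAS values are placeholders; `DefaultAzureCredential` assumes `azure-identity` is installed):

```python
# Illustrative only: the widened annotation documents the credential kinds the clients accept.
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.identity import DefaultAzureCredential  # assumes azure-identity is installed
from azure.storage.blob import BlobServiceClient

ACCOUNT_URL = "https://myaccount.blob.core.windows.net"  # placeholder account

token_client = BlobServiceClient(ACCOUNT_URL, credential=DefaultAzureCredential())       # TokenCredential
sas_client = BlobServiceClient(ACCOUNT_URL, credential=AzureSasCredential("<sas-token>"))  # rotatable SAS
named_key_client = BlobServiceClient(ACCOUNT_URL, credential=AzureNamedKeyCredential("myaccount", "<key>"))
string_client = BlobServiceClient(ACCOUNT_URL, credential="<account-key-or-sas-string>")   # plain string
```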
@@ -83,12 +95,15 @@ class StorageAccountHostsMixin(object):
 
         if service not in ["blob", "queue", "file-share", "dfs"]:
             raise ValueError(f"Invalid service: {service}")
-        service_name = service.split(
+        service_name = service.split("-")[0]
         account = parsed_url.netloc.split(f".{service_name}.core.")
 
         self.account_name = account[0] if len(account) > 1 else None
-        if
-
+        if (
+            not self.account_name
+            and parsed_url.netloc.startswith("localhost")
+            or parsed_url.netloc.startswith("127.0.0.1")
+        ):
             self._is_localhost = True
             self.account_name = parsed_url.path.strip("/")
 
@@ -106,84 +121,76 @@ class StorageAccountHostsMixin(object):
         secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary")
         if kwargs.get("secondary_hostname"):
             secondary_hostname = kwargs["secondary_hostname"]
-        primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip(
+        primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/")
         self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname}
 
         self._sdk_moniker = f"storage-{service}/{VERSION}"
         self._config, self._pipeline = self._create_pipeline(self.credential, sdk_moniker=self._sdk_moniker, **kwargs)
 
-    def __enter__(self):
-        self._client.__enter__()
-        return self
-
-    def __exit__(self, *args):
-        self._client.__exit__(*args)
-
-    def close(self):
-        """ This method is to close the sockets opened by the client.
-        It need not be used when using with a context manager.
-        """
-        self._client.close()
-
     @property
-    def url(self):
+    def url(self) -> str:
         """The full endpoint URL to this entity, including SAS token if used.
 
         This could be either the primary endpoint,
         or the secondary endpoint depending on the current :func:`location_mode`.
 
-        :
+        :return: The full endpoint URL to this entity, including SAS token if used.
         :rtype: str
         """
-        return self._format_url(self._hosts[self._location_mode])
+        return self._format_url(self._hosts[self._location_mode])  # type: ignore
 
     @property
-    def primary_endpoint(self):
+    def primary_endpoint(self) -> str:
         """The full primary endpoint URL.
 
+        :return: The full primary endpoint URL.
         :rtype: str
         """
-        return self._format_url(self._hosts[LocationMode.PRIMARY])
+        return self._format_url(self._hosts[LocationMode.PRIMARY])  # type: ignore
 
     @property
-    def primary_hostname(self):
+    def primary_hostname(self) -> str:
         """The hostname of the primary endpoint.
 
+        :return: The hostname of the primary endpoint.
         :rtype: str
         """
         return self._hosts[LocationMode.PRIMARY]
 
     @property
-    def secondary_endpoint(self):
+    def secondary_endpoint(self) -> str:
         """The full secondary endpoint URL if configured.
 
         If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional
         `secondary_hostname` keyword argument on instantiation.
 
+        :return: The full secondary endpoint URL.
         :rtype: str
-        :raise ValueError:
+        :raise ValueError: If no secondary endpoint is configured.
         """
         if not self._hosts[LocationMode.SECONDARY]:
             raise ValueError("No secondary host configured.")
-        return self._format_url(self._hosts[LocationMode.SECONDARY])
+        return self._format_url(self._hosts[LocationMode.SECONDARY])  # type: ignore
 
     @property
-    def secondary_hostname(self):
+    def secondary_hostname(self) -> Optional[str]:
         """The hostname of the secondary endpoint.
 
         If not available this will be None. To explicitly specify a secondary hostname, use the optional
         `secondary_hostname` keyword argument on instantiation.
 
+        :return: The hostname of the secondary endpoint, or None if not configured.
        :rtype: Optional[str]
         """
         return self._hosts[LocationMode.SECONDARY]
 
     @property
-    def location_mode(self):
+    def location_mode(self) -> str:
         """The location mode that the client is currently using.
 
         By default this will be "primary". Options include "primary" and "secondary".
 
+        :return: The current location mode.
         :rtype: str
         """
 
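The property hunk above adds return annotations and `:return:` docs to the endpoint accessors without changing their behavior. A short sketch of reading them from a public client (placeholder account URL and key):

```python
# Minimal sketch: the annotated endpoint properties as read from a public client.
from azure.storage.blob import BlobServiceClient

client = BlobServiceClient("https://myaccount.blob.core.windows.net", credential="<account-key>")

print(client.url)               # full endpoint URL, now annotated as -> str
print(client.primary_hostname)  # "myaccount.blob.core.windows.net"
print(client.location_mode)     # "primary" by default

try:
    print(client.secondary_endpoint)
except ValueError:
    # Raised only when no secondary endpoint is configured, per the updated docstring.
    print("no secondary endpoint configured")
```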
@@ -206,11 +213,16 @@ class StorageAccountHostsMixin(object):
         return self._client._config.version  # pylint: disable=protected-access
 
     def _format_query_string(
-        self,
-
+        self,
+        sas_token: Optional[str],
+        credential: Optional[
+            Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]
+        ],
         snapshot: Optional[str] = None,
-        share_snapshot: Optional[str] = None
-    ) -> Tuple[
+        share_snapshot: Optional[str] = None,
+    ) -> Tuple[
+        str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]]
+    ]:
         query_str = "?"
         if snapshot:
             query_str += f"snapshot={snapshot}&"
@@ -218,7 +230,8 @@ class StorageAccountHostsMixin(object):
             query_str += f"sharesnapshot={share_snapshot}&"
         if sas_token and isinstance(credential, AzureSasCredential):
             raise ValueError(
-                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+            )
         if _is_credential_sastoken(credential):
             credential = cast(str, credential)
             query_str += credential.lstrip("?")
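The reformatted `raise` above is the guard that rejects an `AzureSasCredential` when the account URL already carries a SAS token. A hedged sketch of the failure mode (URL and token values are placeholders):

```python
# Sketch of the guard shown above: a SAS embedded in the URL plus an AzureSasCredential
# raises ValueError at client construction time. Values are placeholders.
from azure.core.credentials import AzureSasCredential
from azure.storage.blob import BlobServiceClient

url_with_sas = "https://myaccount.blob.core.windows.net/?sv=2025-01-05&sig=<signature>"

try:
    BlobServiceClient(url_with_sas, credential=AzureSasCredential("?sv=2025-01-05&sig=<signature>"))
except ValueError as exc:
    print(exc)  # cannot combine AzureSasCredential with a SAS already in the resource URI
```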
@@ -228,13 +241,16 @@ class StorageAccountHostsMixin(object):
         return query_str.rstrip("?&"), credential
 
     def _create_pipeline(
-        self,
-
+        self,
+        credential: Optional[
+            Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]
+        ] = None,
+        **kwargs: Any,
     ) -> Tuple[StorageConfiguration, Pipeline]:
         self._credential_policy: Any = None
         if hasattr(credential, "get_token"):
-            if kwargs.get(
-                audience = str(kwargs.pop(
+            if kwargs.get("audience"):
+                audience = str(kwargs.pop("audience")).rstrip("/") + DEFAULT_OAUTH_SCOPE
             else:
                 audience = STORAGE_OAUTH_SCOPE
             self._credential_policy = StorageBearerTokenCredentialPolicy(cast(TokenCredential, credential), audience)
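The `_create_pipeline` hunk above shows the `audience` handling: when a token credential is used, an optional `audience` keyword has any trailing slash stripped and the default OAuth scope appended. A hedged sketch of passing it through a public client (placeholder endpoint; `azure-identity` assumed):

```python
# Hedged sketch: the `audience` keyword ends up in the branch above.
from azure.identity import DefaultAzureCredential  # assumes azure-identity is installed
from azure.storage.blob import BlobServiceClient

client = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential=DefaultAzureCredential(),
    audience="https://myaccount.blob.core.windows.net",  # normalized to "<audience>/.default" internally
)
```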
@@ -268,22 +284,18 @@ class StorageAccountHostsMixin(object):
             config.logging_policy,
             StorageResponseHook(**kwargs),
             DistributedTracingPolicy(**kwargs),
-            HttpLoggingPolicy(**kwargs)
+            HttpLoggingPolicy(**kwargs),
         ]
         if kwargs.get("_additional_pipeline_policies"):
             policies = policies + kwargs.get("_additional_pipeline_policies")  # type: ignore
         config.transport = transport  # type: ignore
         return config, Pipeline(transport, policies=policies)
 
-    def _batch_send(
-        self,
-        *reqs: "HttpRequest",
-        **kwargs: Any
-    ) -> Iterator["HttpResponse"]:
+    def _batch_send(self, *reqs: "HttpRequest", **kwargs: Any) -> Iterator["HttpResponse"]:
         """Given a series of request, do a Storage batch call.
 
         :param HttpRequest reqs: A collection of HttpRequest objects.
-        :
+        :return: An iterator of HttpResponse objects.
         :rtype: Iterator[HttpResponse]
         """
         # Pop it here, so requests doesn't feel bad about additional kwarg
@@ -292,25 +304,21 @@ class StorageAccountHostsMixin(object):
 
         request = self._client._client.post(  # pylint: disable=protected-access
             url=(
-                f
+                f"{self.scheme}://{self.primary_hostname}/"
                 f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}"
                 f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}"
             ),
             headers={
-
-                "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False)
-            }
+                "x-ms-version": self.api_version,
+                "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False),
+            },
         )
 
         policies = [StorageHeadersPolicy()]
         if self._credential_policy:
             policies.append(self._credential_policy)
 
-        request.set_multipart_mixed(
-            *reqs,
-            policies=policies,
-            enforce_https=False
-        )
+        request.set_multipart_mixed(*reqs, policies=policies, enforce_https=False)
 
         Pipeline._prepare_multipart_mixed_request(request)  # pylint: disable=protected-access
         body = serialize_batch_body(request.multipart_mixed_info[0], batch_id)
@@ -318,9 +326,7 @@ class StorageAccountHostsMixin(object):
 
         temp = request.multipart_mixed_info
         request.multipart_mixed_info = None
-        pipeline_response = self._pipeline.run(
-            request, **kwargs
-        )
+        pipeline_response = self._pipeline.run(request, **kwargs)
         response = pipeline_response.http_response
         request.multipart_mixed_info = temp
 
@@ -332,8 +338,7 @@ class StorageAccountHostsMixin(object):
             parts = list(response.parts())
             if any(p for p in parts if not 200 <= p.status_code < 300):
                 error = PartialBatchErrorException(
-                    message="There is a partial failure in the batch operation.",
-                    response=response, parts=parts
+                    message="There is a partial failure in the batch operation.", response=response, parts=parts
                 )
                 raise error
         return iter(parts)
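`_batch_send` and `PartialBatchErrorException` above back the public batch helpers such as `ContainerClient.delete_blobs`. A hedged sketch of handling a partial batch failure (account URL, key, container, and blob names are placeholders; this assumes `PartialBatchErrorException` is importable from the package root, as in recent releases):

```python
# Hedged sketch: a partial batch failure surfaces as PartialBatchErrorException carrying
# the individual sub-responses.
from azure.storage.blob import BlobServiceClient, PartialBatchErrorException

service = BlobServiceClient("https://myaccount.blob.core.windows.net", credential="<account-key>")
container = service.get_container_client("mycontainer")

try:
    container.delete_blobs("blob-1", "blob-2", "missing-blob")
except PartialBatchErrorException as error:
    for part in error.parts:
        print(part.status_code)  # per-part HTTP status of the batch sub-responses
```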
@@ -347,6 +352,7 @@ class TransportWrapper(HttpTransport):
     by a `get_client` method does not close the outer transport for the parent
     when used in a context manager.
     """
+
     def __init__(self, transport):
         self._transport = transport
 
@@ -368,7 +374,9 @@ class TransportWrapper(HttpTransport):
 
 def _format_shared_key_credential(
     account_name: Optional[str],
-    credential: Optional[
+    credential: Optional[
+        Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]
+    ] = None,
 ) -> Any:
     if isinstance(credential, str):
         if not account_name:
@@ -388,8 +396,12 @@ def _format_shared_key_credential(
 def parse_connection_str(
     conn_str: str,
     credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]],
-    service: str
-) -> Tuple[
+    service: str,
+) -> Tuple[
+    str,
+    Optional[str],
+    Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]],
+]:
     conn_str = conn_str.rstrip(";")
     conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")]
     if any(len(tup) != 2 for tup in conn_settings_list):
@@ -411,14 +423,11 @@ def parse_connection_str(
     if endpoints["secondary"] in conn_settings:
         raise ValueError("Connection string specifies only secondary endpoint.")
     try:
-        primary =(
+        primary = (
             f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://"
             f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}"
         )
-        secondary =
-            f"{conn_settings['ACCOUNTNAME']}-secondary."
-            f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
-        )
+        secondary = f"{conn_settings['ACCOUNTNAME']}-secondary." f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
     except KeyError:
         pass
 
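`parse_connection_str` above reads the `DefaultEndpointsProtocol`, `AccountName`, and `EndpointSuffix` fields and derives the secondary endpoint as `<AccountName>-secondary.<service>.<EndpointSuffix>`. A sketch with placeholder account values:

```python
# Sketch with placeholder values: the connection-string fields read above and the
# primary/secondary hostnames derived from them.
from azure.storage.blob import BlobServiceClient

conn_str = (
    "DefaultEndpointsProtocol=https;"
    "AccountName=myaccount;"
    "AccountKey=<base64-key>;"
    "EndpointSuffix=core.windows.net"
)

client = BlobServiceClient.from_connection_string(conn_str)
print(client.primary_hostname)    # myaccount.blob.core.windows.net
print(client.secondary_hostname)  # myaccount-secondary.blob.core.windows.net
```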
@@ -438,7 +447,7 @@ def parse_connection_str(
 
 
 def create_configuration(**kwargs: Any) -> StorageConfiguration:
-
+    # Backwards compatibility if someone is not passing sdk_moniker
    if not kwargs.get("sdk_moniker"):
         kwargs["sdk_moniker"] = f"storage-{kwargs.pop('storage_sdk')}/{VERSION}"
     config = StorageConfiguration(**kwargs)
azure/storage/blob/_shared/base_client_async.py

@@ -51,31 +51,17 @@ _SERVICE_PARAMS = {
 
 class AsyncStorageAccountHostsMixin(object):
 
-    def __enter__(self):
-        raise TypeError("Async client only supports 'async with'.")
-
-    def __exit__(self, *args):
-        pass
-
-    async def __aenter__(self):
-        await self._client.__aenter__()
-        return self
-
-    async def __aexit__(self, *args):
-        await self._client.__aexit__(*args)
-
-    async def close(self):
-        """ This method is to close the sockets opened by the client.
-        It need not be used when using with a context manager.
-        """
-        await self._client.close()
-
     def _format_query_string(
-        self,
-
+        self,
+        sas_token: Optional[str],
+        credential: Optional[
+            Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]
+        ],
         snapshot: Optional[str] = None,
-        share_snapshot: Optional[str] = None
-    ) -> Tuple[
+        share_snapshot: Optional[str] = None,
+    ) -> Tuple[
+        str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]]
+    ]:
         query_str = "?"
         if snapshot:
             query_str += f"snapshot={snapshot}&"
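The hunk above removes the context-manager dunders from `AsyncStorageAccountHostsMixin`; the public async clients are still used with `async with` (or closed explicitly with `await client.close()`). A minimal sketch, assuming `azure-identity` for the credential and a placeholder account URL:

```python
# Minimal sketch: the async clients remain async context managers; values are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential  # assumes azure-identity is installed
from azure.storage.blob.aio import BlobServiceClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with BlobServiceClient("https://myaccount.blob.core.windows.net", credential=credential) as client:
            async for container in client.list_containers():
                print(container.name)


asyncio.run(main())
```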
@@ -83,7 +69,8 @@ class AsyncStorageAccountHostsMixin(object):
             query_str += f"sharesnapshot={share_snapshot}&"
         if sas_token and isinstance(credential, AzureSasCredential):
             raise ValueError(
-                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+            )
         if _is_credential_sastoken(credential):
             query_str += credential.lstrip("?")  # type: ignore [union-attr]
             credential = None
@@ -92,35 +79,40 @@ class AsyncStorageAccountHostsMixin(object):
         return query_str.rstrip("?&"), credential
 
     def _create_pipeline(
-        self,
-
+        self,
+        credential: Optional[
+            Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]
+        ] = None,
+        **kwargs: Any,
     ) -> Tuple[StorageConfiguration, AsyncPipeline]:
         self._credential_policy: Optional[
-            Union[AsyncStorageBearerTokenCredentialPolicy,
-
-
-
-
-                audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE
+            Union[AsyncStorageBearerTokenCredentialPolicy, SharedKeyCredentialPolicy, AzureSasCredentialPolicy]
+        ] = None
+        if hasattr(credential, "get_token"):
+            if kwargs.get("audience"):
+                audience = str(kwargs.pop("audience")).rstrip("/") + DEFAULT_OAUTH_SCOPE
             else:
                 audience = STORAGE_OAUTH_SCOPE
             self._credential_policy = AsyncStorageBearerTokenCredentialPolicy(
-
+                cast(AsyncTokenCredential, credential), audience
+            )
         elif isinstance(credential, SharedKeyCredentialPolicy):
             self._credential_policy = credential
         elif isinstance(credential, AzureSasCredential):
             self._credential_policy = AzureSasCredentialPolicy(credential)
         elif credential is not None:
             raise TypeError(f"Unsupported credential: {type(credential)}")
-        config = kwargs.get(
-        if kwargs.get(
-            return config, kwargs[
-        transport = kwargs.get(
+        config = kwargs.get("_configuration") or create_configuration(**kwargs)
+        if kwargs.get("_pipeline"):
+            return config, kwargs["_pipeline"]
+        transport = kwargs.get("transport")
         kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT)
         kwargs.setdefault("read_timeout", READ_TIMEOUT)
         if not transport:
             try:
-                from azure.core.pipeline.transport import
+                from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
+                    AioHttpTransport,
+                )
             except ImportError as exc:
                 raise ImportError("Unable to create async transport. Please check aiohttp is installed.") from exc
             transport = AioHttpTransport(**kwargs)
@@ -143,53 +135,41 @@ class AsyncStorageAccountHostsMixin(object):
             HttpLoggingPolicy(**kwargs),
         ]
         if kwargs.get("_additional_pipeline_policies"):
-            policies = policies + kwargs.get("_additional_pipeline_policies") #type: ignore
-        config.transport = transport
-        return config, AsyncPipeline(transport, policies=policies)
+            policies = policies + kwargs.get("_additional_pipeline_policies")  # type: ignore
+        config.transport = transport  # type: ignore
+        return config, AsyncPipeline(transport, policies=policies)  # type: ignore
 
-    async def _batch_send(
-        self,
-        *reqs: "HttpRequest",
-        **kwargs: Any
-    ) -> AsyncList["HttpResponse"]:
+    async def _batch_send(self, *reqs: "HttpRequest", **kwargs: Any) -> AsyncList["HttpResponse"]:
         """Given a series of request, do a Storage batch call.
 
         :param HttpRequest reqs: A collection of HttpRequest objects.
-        :
+        :return: An AsyncList of HttpResponse objects.
         :rtype: AsyncList[HttpResponse]
         """
         # Pop it here, so requests doesn't feel bad about additional kwarg
         raise_on_any_failure = kwargs.pop("raise_on_any_failure", True)
         request = self._client._client.post(  # pylint: disable=protected-access
             url=(
-                f
+                f"{self.scheme}://{self.primary_hostname}/"
                 f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}"
                 f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}"
             ),
-            headers={
-                'x-ms-version': self.api_version
-            }
+            headers={"x-ms-version": self.api_version},
         )
 
         policies = [StorageHeadersPolicy()]
         if self._credential_policy:
             policies.append(self._credential_policy)  # type: ignore
 
-        request.set_multipart_mixed(
-            *reqs,
-            policies=policies,
-            enforce_https=False
-        )
+        request.set_multipart_mixed(*reqs, policies=policies, enforce_https=False)
 
-        pipeline_response = await self._pipeline.run(
-            request, **kwargs
-        )
+        pipeline_response = await self._pipeline.run(request, **kwargs)
         response = pipeline_response.http_response
 
         try:
             if response.status_code not in [202]:
                 raise HttpResponseError(response=response)
-            parts = response.parts()
+            parts = response.parts()  # Return an AsyncIterator
             if raise_on_any_failure:
                 parts_list = []
                 async for part in parts:
@@ -197,7 +177,8 @@ class AsyncStorageAccountHostsMixin(object):
                 if any(p for p in parts_list if not 200 <= p.status_code < 300):
                     error = PartialBatchErrorException(
                         message="There is a partial failure in the batch operation.",
-                        response=response,
+                        response=response,
+                        parts=parts_list,
                     )
                     raise error
                 return AsyncList(parts_list)
@@ -205,11 +186,16 @@ class AsyncStorageAccountHostsMixin(object):
         except HttpResponseError as error:
             process_storage_error(error)
 
+
 def parse_connection_str(
     conn_str: str,
     credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]],
-    service: str
-) -> Tuple[
+    service: str,
+) -> Tuple[
+    str,
+    Optional[str],
+    Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]],
+]:
     conn_str = conn_str.rstrip(";")
     conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")]
     if any(len(tup) != 2 for tup in conn_settings_list):
@@ -231,14 +217,11 @@ def parse_connection_str(
     if endpoints["secondary"] in conn_settings:
         raise ValueError("Connection string specifies only secondary endpoint.")
     try:
-        primary =(
+        primary = (
             f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://"
             f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}"
         )
-        secondary =
-            f"{conn_settings['ACCOUNTNAME']}-secondary."
-            f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
-        )
+        secondary = f"{conn_settings['ACCOUNTNAME']}-secondary." f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
     except KeyError:
         pass
 
@@ -256,11 +239,13 @@ def parse_connection_str(
         secondary = secondary.replace(".blob.", ".dfs.")
     return primary, secondary, credential
 
+
 class AsyncTransportWrapper(AsyncHttpTransport):
     """Wrapper class that ensures that an inner client created
     by a `get_client` method does not close the outer transport for the parent
     when used in a context manager.
     """
+
     def __init__(self, async_transport):
         self._transport = async_transport
 