azure-storage-blob 12.25.1__py3-none-any.whl → 12.27.0b1__py3-none-any.whl
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- azure/storage/blob/__init__.py +3 -2
- azure/storage/blob/_blob_client.py +94 -41
- azure/storage/blob/_blob_client_helpers.py +19 -4
- azure/storage/blob/_blob_service_client.py +16 -13
- azure/storage/blob/_container_client.py +25 -22
- azure/storage/blob/_deserialize.py +1 -1
- azure/storage/blob/_download.py +7 -7
- azure/storage/blob/_encryption.py +177 -184
- azure/storage/blob/_generated/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/_configuration.py +2 -2
- azure/storage/blob/_generated/_utils/__init__.py +6 -0
- azure/storage/blob/_generated/{_serialization.py → _utils/serialization.py} +7 -25
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/aio/_configuration.py +2 -2
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +11 -14
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +40 -64
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +18 -20
- azure/storage/blob/_generated/aio/operations/_container_operations.py +21 -43
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +18 -27
- azure/storage/blob/_generated/aio/operations/_service_operations.py +11 -22
- azure/storage/blob/_generated/models/__init__.py +2 -0
- azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +6 -0
- azure/storage/blob/_generated/models/_models_py3.py +30 -9
- azure/storage/blob/_generated/operations/_append_blob_operations.py +19 -20
- azure/storage/blob/_generated/operations/_blob_operations.py +68 -89
- azure/storage/blob/_generated/operations/_block_blob_operations.py +31 -27
- azure/storage/blob/_generated/operations/_container_operations.py +40 -62
- azure/storage/blob/_generated/operations/_page_blob_operations.py +31 -37
- azure/storage/blob/_generated/operations/_service_operations.py +20 -32
- azure/storage/blob/_lease.py +1 -0
- azure/storage/blob/_list_blobs_helper.py +1 -1
- azure/storage/blob/_quick_query_helper.py +20 -24
- azure/storage/blob/_serialize.py +2 -0
- azure/storage/blob/_shared/__init__.py +7 -7
- azure/storage/blob/_shared/authentication.py +49 -32
- azure/storage/blob/_shared/avro/avro_io.py +44 -42
- azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
- azure/storage/blob/_shared/avro/datafile.py +24 -21
- azure/storage/blob/_shared/avro/datafile_async.py +15 -15
- azure/storage/blob/_shared/avro/schema.py +196 -217
- azure/storage/blob/_shared/base_client.py +82 -59
- azure/storage/blob/_shared/base_client_async.py +58 -51
- azure/storage/blob/_shared/constants.py +1 -1
- azure/storage/blob/_shared/models.py +94 -92
- azure/storage/blob/_shared/parser.py +3 -3
- azure/storage/blob/_shared/policies.py +186 -147
- azure/storage/blob/_shared/policies_async.py +53 -65
- azure/storage/blob/_shared/request_handlers.py +50 -45
- azure/storage/blob/_shared/response_handlers.py +54 -45
- azure/storage/blob/_shared/shared_access_signature.py +67 -71
- azure/storage/blob/_shared/uploads.py +56 -49
- azure/storage/blob/_shared/uploads_async.py +70 -58
- azure/storage/blob/_shared_access_signature.py +3 -1
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +3 -2
- azure/storage/blob/aio/_blob_client_async.py +241 -44
- azure/storage/blob/aio/_blob_service_client_async.py +13 -11
- azure/storage/blob/aio/_container_client_async.py +28 -25
- azure/storage/blob/aio/_download_async.py +7 -7
- azure/storage/blob/aio/_lease_async.py +1 -0
- azure/storage/blob/aio/_quick_query_helper_async.py +194 -0
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/METADATA +4 -5
- azure_storage_blob-12.27.0b1.dist-info/RECORD +86 -0
- azure_storage_blob-12.25.1.dist-info/RECORD +0 -84
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/LICENSE +0 -0
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/WHEEL +0 -0
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/top_level.txt +0 -0
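The list above includes the usual release churn (`_version.py`, `RECORD`, `METADATA`) alongside substantive changes under `_shared/`. To confirm which build is active in a given environment, checking the package's reported version is enough; this sketch assumes the top-level `__version__` re-export that recent releases provide. The hunks that follow are from the shared client-base modules.

```python
# Print the installed azure-storage-blob version, e.g. "12.25.1" or "12.27.0b1".
from azure.storage.blob import __version__

print(__version__)
```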
azure/storage/blob/_shared/base_client.py:

@@ -20,7 +20,10 @@ from urllib.parse import parse_qs, quote
 from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential, TokenCredential
 from azure.core.exceptions import HttpResponseError
 from azure.core.pipeline import Pipeline
-from azure.core.pipeline.transport import
+from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import, no-name-in-module
+    HttpTransport,
+    RequestsTransport,
+)
 from azure.core.pipeline.policies import (
     AzureSasCredentialPolicy,
     ContentDecodePolicy,
@@ -73,8 +76,17 @@ class StorageAccountHostsMixin(object):
         self,
         parsed_url: Any,
         service: str,
-        credential: Optional[
-
+        credential: Optional[
+            Union[
+                str,
+                Dict[str, str],
+                AzureNamedKeyCredential,
+                AzureSasCredential,
+                "AsyncTokenCredential",
+                TokenCredential,
+            ]
+        ] = None,
+        **kwargs: Any,
     ) -> None:
         self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY)
         self._hosts = kwargs.get("_hosts", {})
@@ -83,12 +95,15 @@ class StorageAccountHostsMixin(object):

         if service not in ["blob", "queue", "file-share", "dfs"]:
             raise ValueError(f"Invalid service: {service}")
-        service_name = service.split(
+        service_name = service.split("-")[0]
         account = parsed_url.netloc.split(f".{service_name}.core.")

         self.account_name = account[0] if len(account) > 1 else None
-        if
-
+        if (
+            not self.account_name
+            and parsed_url.netloc.startswith("localhost")
+            or parsed_url.netloc.startswith("127.0.0.1")
+        ):
             self._is_localhost = True
             self.account_name = parsed_url.path.strip("/")

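The constructor hunk above widens the `credential` annotation to the full set of supported credential shapes. A minimal sketch of passing the public variants to `BlobServiceClient`; the account URL, key, and SAS values are placeholders, and `DefaultAzureCredential` requires the separate `azure-identity` package.

```python
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

ACCOUNT_URL = "https://myaccount.blob.core.windows.net"

# Token credential (covered by TokenCredential / "AsyncTokenCredential" in the Union).
aad_client = BlobServiceClient(ACCOUNT_URL, credential=DefaultAzureCredential())

# Shared key wrapped in AzureNamedKeyCredential.
key_client = BlobServiceClient(
    ACCOUNT_URL, credential=AzureNamedKeyCredential("myaccount", "<account-key>")
)

# Pre-signed SAS wrapped in AzureSasCredential (a bare SAS string also works).
sas_client = BlobServiceClient(ACCOUNT_URL, credential=AzureSasCredential("<sas-token>"))
```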
@@ -106,7 +121,7 @@ class StorageAccountHostsMixin(object):
             secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary")
         if kwargs.get("secondary_hostname"):
             secondary_hostname = kwargs["secondary_hostname"]
-        primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip(
+        primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/")
         self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname}

         self._sdk_moniker = f"storage-{service}/{VERSION}"
@@ -119,70 +134,76 @@ class StorageAccountHostsMixin(object):
     def __exit__(self, *args):
         self._client.__exit__(*args)

-    def close(self):
-        """
+    def close(self) -> None:
+        """This method is to close the sockets opened by the client.
         It need not be used when using with a context manager.
         """
         self._client.close()

     @property
-    def url(self):
+    def url(self) -> str:
         """The full endpoint URL to this entity, including SAS token if used.

         This could be either the primary endpoint,
         or the secondary endpoint depending on the current :func:`location_mode`.
-
+
+        :return: The full endpoint URL to this entity, including SAS token if used.
         :rtype: str
         """
-        return self._format_url(self._hosts[self._location_mode])
+        return self._format_url(self._hosts[self._location_mode])  # type: ignore

     @property
-    def primary_endpoint(self):
+    def primary_endpoint(self) -> str:
         """The full primary endpoint URL.

+        :return: The full primary endpoint URL.
         :rtype: str
         """
-        return self._format_url(self._hosts[LocationMode.PRIMARY])
+        return self._format_url(self._hosts[LocationMode.PRIMARY])  # type: ignore

     @property
-    def primary_hostname(self):
+    def primary_hostname(self) -> str:
         """The hostname of the primary endpoint.

+        :return: The hostname of the primary endpoint.
         :rtype: str
         """
         return self._hosts[LocationMode.PRIMARY]

     @property
-    def secondary_endpoint(self):
+    def secondary_endpoint(self) -> str:
         """The full secondary endpoint URL if configured.

         If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional
         `secondary_hostname` keyword argument on instantiation.

+        :return: The full secondary endpoint URL.
         :rtype: str
-        :raise ValueError:
+        :raise ValueError: If no secondary endpoint is configured.
         """
         if not self._hosts[LocationMode.SECONDARY]:
             raise ValueError("No secondary host configured.")
-        return self._format_url(self._hosts[LocationMode.SECONDARY])
+        return self._format_url(self._hosts[LocationMode.SECONDARY])  # type: ignore

     @property
-    def secondary_hostname(self):
+    def secondary_hostname(self) -> Optional[str]:
         """The hostname of the secondary endpoint.

         If not available this will be None. To explicitly specify a secondary hostname, use the optional
         `secondary_hostname` keyword argument on instantiation.

+        :return: The hostname of the secondary endpoint, or None if not configured.
         :rtype: Optional[str]
         """
         return self._hosts[LocationMode.SECONDARY]

     @property
-    def location_mode(self):
+    def location_mode(self) -> str:
         """The location mode that the client is currently using.

         By default this will be "primary". Options include "primary" and "secondary".

+        :return: The current location mode.
         :rtype: str
         """

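The property hunk above only adds return-type annotations and `:return:` docs; behaviour is unchanged. For reference, these properties surface on the public clients roughly like this (the account name and key are placeholders):

```python
from azure.storage.blob import BlobServiceClient

client = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential="<account-key>",
    # Optional override; otherwise "<account>-secondary" is inferred.
    secondary_hostname="myaccount-secondary.blob.core.windows.net",
)

print(client.url)                 # endpoint for the current location_mode
print(client.primary_endpoint)    # always the primary endpoint
print(client.secondary_hostname)  # None when no secondary host is configured
print(client.location_mode)       # "primary" by default
```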
@@ -205,11 +226,16 @@ class StorageAccountHostsMixin(object):
         return self._client._config.version  # pylint: disable=protected-access

     def _format_query_string(
-        self,
-
+        self,
+        sas_token: Optional[str],
+        credential: Optional[
+            Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]
+        ],
         snapshot: Optional[str] = None,
-        share_snapshot: Optional[str] = None
-    ) -> Tuple[
+        share_snapshot: Optional[str] = None,
+    ) -> Tuple[
+        str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]]
+    ]:
         query_str = "?"
         if snapshot:
             query_str += f"snapshot={snapshot}&"
@@ -217,7 +243,8 @@ class StorageAccountHostsMixin(object):
             query_str += f"sharesnapshot={share_snapshot}&"
         if sas_token and isinstance(credential, AzureSasCredential):
             raise ValueError(
-                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+            )
         if _is_credential_sastoken(credential):
             credential = cast(str, credential)
             query_str += credential.lstrip("?")
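The `_format_query_string` changes are formatting-only; the guard itself is unchanged. As a reminder of what it rejects, a sketch follows (the URL and SAS values are placeholders):

```python
from azure.core.credentials import AzureSasCredential
from azure.storage.blob import BlobServiceClient

URL_WITH_SAS = "https://myaccount.blob.core.windows.net/?sv=2025-01-05&sig=<signature>"

try:
    # Supplying AzureSasCredential while the URL already carries a SAS should raise.
    BlobServiceClient(URL_WITH_SAS, credential=AzureSasCredential("sv=2025-01-05&sig=<signature>"))
except ValueError as err:
    print(err)
```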
@@ -227,13 +254,16 @@ class StorageAccountHostsMixin(object):
         return query_str.rstrip("?&"), credential

     def _create_pipeline(
-        self,
-
+        self,
+        credential: Optional[
+            Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]
+        ] = None,
+        **kwargs: Any,
     ) -> Tuple[StorageConfiguration, Pipeline]:
         self._credential_policy: Any = None
         if hasattr(credential, "get_token"):
-            if kwargs.get(
-                audience = str(kwargs.pop(
+            if kwargs.get("audience"):
+                audience = str(kwargs.pop("audience")).rstrip("/") + DEFAULT_OAUTH_SCOPE
             else:
                 audience = STORAGE_OAUTH_SCOPE
             self._credential_policy = StorageBearerTokenCredentialPolicy(cast(TokenCredential, credential), audience)
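The `_create_pipeline` hunk shows the `audience` handling: an explicit audience has any trailing `/` stripped and the default OAuth scope appended; otherwise the public-cloud storage scope is used. From the caller's side this is just the documented `audience` keyword, sketched here with placeholder values:

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

client = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential=DefaultAzureCredential(),
    # Only relevant with token credentials, e.g. for sovereign-cloud endpoints.
    audience="https://storage.azure.com/",
)
```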
@@ -267,22 +297,18 @@ class StorageAccountHostsMixin(object):
             config.logging_policy,
             StorageResponseHook(**kwargs),
             DistributedTracingPolicy(**kwargs),
-            HttpLoggingPolicy(**kwargs)
+            HttpLoggingPolicy(**kwargs),
         ]
         if kwargs.get("_additional_pipeline_policies"):
             policies = policies + kwargs.get("_additional_pipeline_policies")  # type: ignore
         config.transport = transport  # type: ignore
         return config, Pipeline(transport, policies=policies)

-    def _batch_send(
-        self,
-        *reqs: "HttpRequest",
-        **kwargs: Any
-    ) -> Iterator["HttpResponse"]:
+    def _batch_send(self, *reqs: "HttpRequest", **kwargs: Any) -> Iterator["HttpResponse"]:
         """Given a series of request, do a Storage batch call.

         :param HttpRequest reqs: A collection of HttpRequest objects.
-        :
+        :return: An iterator of HttpResponse objects.
         :rtype: Iterator[HttpResponse]
         """
         # Pop it here, so requests doesn't feel bad about additional kwarg
@@ -291,25 +317,21 @@ class StorageAccountHostsMixin(object):

         request = self._client._client.post(  # pylint: disable=protected-access
             url=(
-                f
+                f"{self.scheme}://{self.primary_hostname}/"
                 f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}"
                 f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}"
             ),
             headers={
-
-                "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False)
-            }
+                "x-ms-version": self.api_version,
+                "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False),
+            },
         )

         policies = [StorageHeadersPolicy()]
         if self._credential_policy:
             policies.append(self._credential_policy)

-        request.set_multipart_mixed(
-            *reqs,
-            policies=policies,
-            enforce_https=False
-        )
+        request.set_multipart_mixed(*reqs, policies=policies, enforce_https=False)

         Pipeline._prepare_multipart_mixed_request(request)  # pylint: disable=protected-access
         body = serialize_batch_body(request.multipart_mixed_info[0], batch_id)
@@ -317,9 +339,7 @@ class StorageAccountHostsMixin(object):

         temp = request.multipart_mixed_info
         request.multipart_mixed_info = None
-        pipeline_response = self._pipeline.run(
-            request, **kwargs
-        )
+        pipeline_response = self._pipeline.run(request, **kwargs)
         response = pipeline_response.http_response
         request.multipart_mixed_info = temp

@@ -331,8 +351,7 @@ class StorageAccountHostsMixin(object):
             parts = list(response.parts())
             if any(p for p in parts if not 200 <= p.status_code < 300):
                 error = PartialBatchErrorException(
-                    message="There is a partial failure in the batch operation.",
-                    response=response, parts=parts
+                    message="There is a partial failure in the batch operation.", response=response, parts=parts
                 )
                 raise error
             return iter(parts)
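`_batch_send` is the plumbing behind the public batch helpers; the edits above are reflow plus an added `:return:` doc. A sketch of the call path through `ContainerClient.delete_blobs`, which issues a single `comp=batch` request (account and blob names are placeholders):

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient(
    "https://myaccount.blob.core.windows.net", credential=DefaultAzureCredential()
)
container = service.get_container_client("my-container")

# Sub-requests succeed or fail individually; by default a partial failure raises
# PartialBatchErrorException, which raise_on_any_failure=False suppresses.
container.delete_blobs("blob-1.txt", "blob-2.txt", raise_on_any_failure=False)
```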
@@ -346,6 +365,7 @@ class TransportWrapper(HttpTransport):
     by a `get_client` method does not close the outer transport for the parent
     when used in a context manager.
     """
+
     def __init__(self, transport):
         self._transport = transport

@@ -367,7 +387,9 @@

 def _format_shared_key_credential(
     account_name: Optional[str],
-    credential: Optional[
+    credential: Optional[
+        Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]
+    ] = None,
 ) -> Any:
     if isinstance(credential, str):
         if not account_name:
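`_format_shared_key_credential` now spells out the same credential Union, including the `Dict[str, str]` form. If that helper is read correctly, the dict variant expects `account_name`/`account_key` entries, roughly as below; the key names mirror what the helper looks up and are illustrative, and the values are placeholders.

```python
from azure.storage.blob import BlobServiceClient

# Hypothetical illustration of the Dict[str, str] credential shape.
client = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential={"account_name": "myaccount", "account_key": "<account-key>"},
)
```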
@@ -387,8 +409,12 @@ def _format_shared_key_credential(
 def parse_connection_str(
     conn_str: str,
     credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]],
-    service: str
-) -> Tuple[
+    service: str,
+) -> Tuple[
+    str,
+    Optional[str],
+    Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]],
+]:
     conn_str = conn_str.rstrip(";")
     conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")]
     if any(len(tup) != 2 for tup in conn_settings_list):
@@ -410,14 +436,11 @@ def parse_connection_str(
     if endpoints["secondary"] in conn_settings:
         raise ValueError("Connection string specifies only secondary endpoint.")
     try:
-        primary =(
+        primary = (
             f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://"
             f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}"
         )
-        secondary =
-            f"{conn_settings['ACCOUNTNAME']}-secondary."
-            f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
-        )
+        secondary = f"{conn_settings['ACCOUNTNAME']}-secondary." f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
     except KeyError:
         pass

@@ -437,7 +460,7 @@ def parse_connection_str(


 def create_configuration(**kwargs: Any) -> StorageConfiguration:
-
+    # Backwards compatibility if someone is not passing sdk_moniker
     if not kwargs.get("sdk_moniker"):
         kwargs["sdk_moniker"] = f"storage-{kwargs.pop('storage_sdk')}/{VERSION}"
     config = StorageConfiguration(**kwargs)
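`parse_connection_str` still derives the primary and secondary endpoints from `DefaultEndpointsProtocol`, `AccountName`, and `EndpointSuffix`; the hunks above only tighten the return annotation and reflow the f-strings. A sketch with placeholder values:

```python
from azure.storage.blob import BlobServiceClient

CONN_STR = (
    "DefaultEndpointsProtocol=https;"
    "AccountName=myaccount;"
    "AccountKey=<account-key>;"
    "EndpointSuffix=core.windows.net"
)

client = BlobServiceClient.from_connection_string(CONN_STR)
print(client.primary_hostname)    # myaccount.blob.core.windows.net
print(client.secondary_hostname)  # myaccount-secondary.blob.core.windows.net
```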
azure/storage/blob/_shared/base_client_async.py:

@@ -64,18 +64,26 @@ class AsyncStorageAccountHostsMixin(object):
     async def __aexit__(self, *args):
         await self._client.__aexit__(*args)

-    async def close(self):
-        """
+    async def close(self) -> None:
+        """This method is to close the sockets opened by the client.
         It need not be used when using with a context manager.
+
+        :return: None
+        :rtype: None
         """
         await self._client.close()

     def _format_query_string(
-        self,
-
+        self,
+        sas_token: Optional[str],
+        credential: Optional[
+            Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]
+        ],
         snapshot: Optional[str] = None,
-        share_snapshot: Optional[str] = None
-    ) -> Tuple[
+        share_snapshot: Optional[str] = None,
+    ) -> Tuple[
+        str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]]
+    ]:
         query_str = "?"
         if snapshot:
             query_str += f"snapshot={snapshot}&"
@@ -83,7 +91,8 @@ class AsyncStorageAccountHostsMixin(object):
             query_str += f"sharesnapshot={share_snapshot}&"
         if sas_token and isinstance(credential, AzureSasCredential):
             raise ValueError(
-                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+                "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature."
+            )
         if _is_credential_sastoken(credential):
             query_str += credential.lstrip("?")  # type: ignore [union-attr]
             credential = None
@@ -92,35 +101,40 @@ class AsyncStorageAccountHostsMixin(object):
         return query_str.rstrip("?&"), credential

     def _create_pipeline(
-        self,
-
+        self,
+        credential: Optional[
+            Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]
+        ] = None,
+        **kwargs: Any,
     ) -> Tuple[StorageConfiguration, AsyncPipeline]:
         self._credential_policy: Optional[
-            Union[AsyncStorageBearerTokenCredentialPolicy,
-
-
-
-
-                audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE
+            Union[AsyncStorageBearerTokenCredentialPolicy, SharedKeyCredentialPolicy, AzureSasCredentialPolicy]
+        ] = None
+        if hasattr(credential, "get_token"):
+            if kwargs.get("audience"):
+                audience = str(kwargs.pop("audience")).rstrip("/") + DEFAULT_OAUTH_SCOPE
             else:
                 audience = STORAGE_OAUTH_SCOPE
             self._credential_policy = AsyncStorageBearerTokenCredentialPolicy(
-
+                cast(AsyncTokenCredential, credential), audience
+            )
         elif isinstance(credential, SharedKeyCredentialPolicy):
             self._credential_policy = credential
         elif isinstance(credential, AzureSasCredential):
             self._credential_policy = AzureSasCredentialPolicy(credential)
         elif credential is not None:
             raise TypeError(f"Unsupported credential: {type(credential)}")
-        config = kwargs.get(
-        if kwargs.get(
-            return config, kwargs[
-        transport = kwargs.get(
+        config = kwargs.get("_configuration") or create_configuration(**kwargs)
+        if kwargs.get("_pipeline"):
+            return config, kwargs["_pipeline"]
+        transport = kwargs.get("transport")
         kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT)
         kwargs.setdefault("read_timeout", READ_TIMEOUT)
         if not transport:
             try:
-                from azure.core.pipeline.transport import
+                from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
+                    AioHttpTransport,
+                )
             except ImportError as exc:
                 raise ImportError("Unable to create async transport. Please check aiohttp is installed.") from exc
             transport = AioHttpTransport(**kwargs)
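The async mixin gets the same treatment, plus an expanded `close()` docstring. The practical guidance is unchanged: use the aio clients as async context managers or close them explicitly so the underlying transport is released. A sketch with a placeholder account, assuming `azure-identity` is installed:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.storage.blob.aio import BlobServiceClient


async def main() -> None:
    credential = DefaultAzureCredential()
    # The async context manager closes the client's sockets on exit.
    async with BlobServiceClient(
        "https://myaccount.blob.core.windows.net", credential=credential
    ) as client:
        async for container in client.list_containers():
            print(container.name)
    await credential.close()


asyncio.run(main())
```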
@@ -143,53 +157,41 @@ class AsyncStorageAccountHostsMixin(object):
             HttpLoggingPolicy(**kwargs),
         ]
         if kwargs.get("_additional_pipeline_policies"):
-            policies = policies + kwargs.get("_additional_pipeline_policies") #type: ignore
-        config.transport = transport
-        return config, AsyncPipeline(transport, policies=policies)
+            policies = policies + kwargs.get("_additional_pipeline_policies")  # type: ignore
+        config.transport = transport  # type: ignore
+        return config, AsyncPipeline(transport, policies=policies)  # type: ignore

-    async def _batch_send(
-        self,
-        *reqs: "HttpRequest",
-        **kwargs: Any
-    ) -> AsyncList["HttpResponse"]:
+    async def _batch_send(self, *reqs: "HttpRequest", **kwargs: Any) -> AsyncList["HttpResponse"]:
         """Given a series of request, do a Storage batch call.

         :param HttpRequest reqs: A collection of HttpRequest objects.
-        :
+        :return: An AsyncList of HttpResponse objects.
         :rtype: AsyncList[HttpResponse]
         """
         # Pop it here, so requests doesn't feel bad about additional kwarg
         raise_on_any_failure = kwargs.pop("raise_on_any_failure", True)
         request = self._client._client.post(  # pylint: disable=protected-access
             url=(
-                f
+                f"{self.scheme}://{self.primary_hostname}/"
                 f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}"
                 f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}"
             ),
-            headers={
-                'x-ms-version': self.api_version
-            }
+            headers={"x-ms-version": self.api_version},
         )

         policies = [StorageHeadersPolicy()]
         if self._credential_policy:
             policies.append(self._credential_policy)  # type: ignore

-        request.set_multipart_mixed(
-            *reqs,
-            policies=policies,
-            enforce_https=False
-        )
+        request.set_multipart_mixed(*reqs, policies=policies, enforce_https=False)

-        pipeline_response = await self._pipeline.run(
-            request, **kwargs
-        )
+        pipeline_response = await self._pipeline.run(request, **kwargs)
         response = pipeline_response.http_response

         try:
             if response.status_code not in [202]:
                 raise HttpResponseError(response=response)
-            parts = response.parts()
+            parts = response.parts()  # Return an AsyncIterator
             if raise_on_any_failure:
                 parts_list = []
                 async for part in parts:
@@ -197,7 +199,8 @@ class AsyncStorageAccountHostsMixin(object):
                 if any(p for p in parts_list if not 200 <= p.status_code < 300):
                     error = PartialBatchErrorException(
                         message="There is a partial failure in the batch operation.",
-                        response=response,
+                        response=response,
+                        parts=parts_list,
                     )
                     raise error
                 return AsyncList(parts_list)
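The async `_batch_send` mirrors the sync reflow and now notes that `response.parts()` returns an async iterator. Its public counterpart looks roughly like this (the connection string and names are placeholders):

```python
import asyncio

from azure.storage.blob.aio import ContainerClient


async def main() -> None:
    async with ContainerClient.from_connection_string(
        "<connection-string>", container_name="my-container"
    ) as container:
        # delete_blobs awaits a single multipart batch request and yields
        # the per-blob sub-responses asynchronously.
        responses = await container.delete_blobs("blob-1.txt", "blob-2.txt")
        async for response in responses:
            print(response.status_code)


asyncio.run(main())
```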
@@ -205,11 +208,16 @@ class AsyncStorageAccountHostsMixin(object):
         except HttpResponseError as error:
             process_storage_error(error)

+
 def parse_connection_str(
     conn_str: str,
     credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]],
-    service: str
-) -> Tuple[
+    service: str,
+) -> Tuple[
+    str,
+    Optional[str],
+    Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]],
+]:
     conn_str = conn_str.rstrip(";")
     conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")]
     if any(len(tup) != 2 for tup in conn_settings_list):
@@ -231,14 +239,11 @@ def parse_connection_str(
     if endpoints["secondary"] in conn_settings:
         raise ValueError("Connection string specifies only secondary endpoint.")
     try:
-        primary =(
+        primary = (
             f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://"
             f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}"
         )
-        secondary =
-            f"{conn_settings['ACCOUNTNAME']}-secondary."
-            f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
-        )
+        secondary = f"{conn_settings['ACCOUNTNAME']}-secondary." f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
     except KeyError:
         pass

@@ -256,11 +261,13 @@ def parse_connection_str(
         secondary = secondary.replace(".blob.", ".dfs.")
     return primary, secondary, credential

+
 class AsyncTransportWrapper(AsyncHttpTransport):
     """Wrapper class that ensures that an inner client created
     by a `get_client` method does not close the outer transport for the parent
     when used in a context manager.
     """
+
     def __init__(self, async_transport):
         self._transport = async_transport

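The remaining hunks cover the async `parse_connection_str` and spacing around `AsyncTransportWrapper`; the `AioHttpTransport` fallback seen earlier is what the clients construct when no transport is supplied. Passing one explicitly is still supported through the documented `transport` keyword; a sketch with placeholder account values and illustrative timeouts:

```python
from azure.core.pipeline.transport import AioHttpTransport
from azure.storage.blob.aio import BlobServiceClient

# Supply an explicit async transport instead of relying on the default fallback.
client = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential="<account-key>",
    transport=AioHttpTransport(connection_timeout=20, read_timeout=60),
)
```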