azure-storage-blob 12.23.0b1__py3-none-any.whl → 12.24.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- azure/storage/blob/_blob_client.py +34 -10
- azure/storage/blob/_blob_client_helpers.py +7 -3
- azure/storage/blob/_blob_service_client.py +1 -1
- azure/storage/blob/_container_client.py +8 -2
- azure/storage/blob/_container_client_helpers.py +11 -6
- azure/storage/blob/_deserialize.py +2 -2
- azure/storage/blob/_encryption.py +15 -10
- azure/storage/blob/_generated/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/_configuration.py +2 -2
- azure/storage/blob/_generated/_serialization.py +267 -150
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/aio/_configuration.py +2 -2
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +23 -11
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +137 -73
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +42 -16
- azure/storage/blob/_generated/aio/operations/_container_operations.py +49 -44
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +35 -23
- azure/storage/blob/_generated/aio/operations/_service_operations.py +30 -25
- azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +1 -0
- azure/storage/blob/_generated/operations/_append_blob_operations.py +35 -15
- azure/storage/blob/_generated/operations/_blob_operations.py +187 -98
- azure/storage/blob/_generated/operations/_block_blob_operations.py +64 -22
- azure/storage/blob/_generated/operations/_container_operations.py +67 -62
- azure/storage/blob/_generated/operations/_page_blob_operations.py +52 -32
- azure/storage/blob/_generated/operations/_service_operations.py +38 -33
- azure/storage/blob/_list_blobs_helper.py +1 -1
- azure/storage/blob/_models.py +4 -3
- azure/storage/blob/_serialize.py +1 -0
- azure/storage/blob/_shared/avro/schema.py +1 -0
- azure/storage/blob/_shared/base_client.py +10 -8
- azure/storage/blob/_shared/base_client_async.py +5 -5
- azure/storage/blob/_shared/models.py +5 -2
- azure/storage/blob/_shared/policies.py +14 -16
- azure/storage/blob/_shared/policies_async.py +19 -6
- azure/storage/blob/_shared/request_handlers.py +2 -3
- azure/storage/blob/_shared/response_handlers.py +2 -2
- azure/storage/blob/_shared/uploads.py +4 -4
- azure/storage/blob/_shared/uploads_async.py +4 -4
- azure/storage/blob/_shared_access_signature.py +0 -1
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/_blob_client_async.py +36 -13
- azure/storage/blob/aio/_blob_service_client_async.py +7 -3
- azure/storage/blob/aio/_container_client_async.py +10 -4
- azure/storage/blob/aio/_download_async.py +94 -71
- azure/storage/blob/aio/_lease_async.py +1 -1
- azure/storage/blob/aio/_list_blobs_helper.py +1 -2
- azure/storage/blob/aio/_models.py +1 -2
- {azure_storage_blob-12.23.0b1.dist-info → azure_storage_blob-12.24.0.dist-info}/METADATA +10 -10
- azure_storage_blob-12.24.0.dist-info/RECORD +84 -0
- {azure_storage_blob-12.23.0b1.dist-info → azure_storage_blob-12.24.0.dist-info}/WHEEL +1 -1
- azure/storage/blob/_generated/_vendor.py +0 -16
- azure_storage_blob-12.23.0b1.dist-info/RECORD +0 -85
- {azure_storage_blob-12.23.0b1.dist-info → azure_storage_blob-12.24.0.dist-info}/LICENSE +0 -0
- {azure_storage_blob-12.23.0b1.dist-info → azure_storage_blob-12.24.0.dist-info}/top_level.txt +0 -0
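
A quick sanity check after upgrading is to confirm which of the two wheels is actually installed in the active environment; a minimal sketch using only the standard library:

```python
# Report the installed wheel's version; after the upgrade this should print
# 12.24.0 instead of 12.23.0b1.
from importlib.metadata import version

print(version("azure-storage-blob"))
```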
@@ -10,12 +10,12 @@ import logging
 import random
 from typing import Any, Dict, TYPE_CHECKING
 
-from azure.core.exceptions import AzureError
+from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError
 from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
 
 from .authentication import AzureSigningError, StorageHttpChallenge
 from .constants import DEFAULT_OAUTH_SCOPE
-from .policies import is_retry, StorageRetryPolicy
+from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy
 
 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential
@@ -42,9 +42,23 @@ async def retry_hook(settings, **kwargs):
                 **kwargs)
 
 
+async def is_checksum_retry(response):
+    # retry if invalid content md5
+    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
+        try:
+            await response.http_response.read()  # Load the body in memory and close the socket
+        except (StreamClosedError, StreamConsumedError):
+            pass
+        computed_md5 = response.http_request.headers.get('content-md5', None) or \
+            encode_base64(StorageContentValidation.get_content_md5(response.http_response.content))
+        if response.http_response.headers['content-md5'] != computed_md5:
+            return True
+    return False
+
+
 class AsyncStorageResponseHook(AsyncHTTPPolicy):
 
-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs):
         self._response_callback = kwargs.get('raw_response_hook')
         super(AsyncStorageResponseHook, self).__init__()
 
@@ -64,9 +78,8 @@ class AsyncStorageResponseHook(AsyncHTTPPolicy):
         request.context.options.pop('raw_response_hook', self._response_callback)
 
         response = await self.next.send(request)
-
+        will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response)
 
-        will_retry = is_retry(response, request.context.options.get('mode'))
         # Auth error could come from Bearer challenge, in which case this request will be made again
         is_auth_error = response.http_response.status_code == 401
         should_update_counts = not (will_retry or is_auth_error)
@@ -112,7 +125,7 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
         while retries_remaining:
             try:
                 response = await self.next.send(request)
-                if is_retry(response, retry_settings['mode']):
+                if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response):
                     retries_remaining = self.increment(
                         retry_settings,
                         request=request.http_request,
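
The hunks above (from `azure/storage/blob/_shared/policies_async.py`, per the file list) teach the async response hook and retry policy to retry a response whose `Content-MD5` header does not match a locally computed digest. A hedged sketch of what opts into that path, assuming an existing container and blob and a connection string in the environment (the names below are placeholders):

```python
# Passing validate_content=True asks the service to return a Content-MD5 per
# response, which is the header the new is_checksum_retry() check compares
# against a locally computed digest before deciding to retry.
import asyncio
import os

from azure.storage.blob.aio import BlobClient


async def read_with_md5_validation() -> bytes:
    async with BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],
        container_name="my-container",   # placeholder
        blob_name="report.csv",          # placeholder
    ) as blob:
        downloader = await blob.download_blob(validate_content=True)
        return await downloader.readall()


print(len(asyncio.run(read_with_md5_validation())))
```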
@@ -4,12 +4,11 @@
 # license information.
 # --------------------------------------------------------------------------
 
-from typing import Dict, Optional
-
 import logging
-from os import fstat
 import stat
 from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
+from os import fstat
+from typing import Dict, Optional
 
 import isodate
 
@@ -60,9 +60,9 @@ def normalize_headers(headers):
 
 def deserialize_metadata(response, obj, headers):  # pylint: disable=unused-argument
     try:
-        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.startswith("x-ms-meta-")}
+        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith('x-ms-meta-')}
     except AttributeError:
-        raw_metadata = {k: v for k, v in response.headers.items() if k.startswith("x-ms-meta-")}
+        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith('x-ms-meta-')}
     return {k[10:]: v for k, v in raw_metadata.items()}
 
 
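
The `deserialize_metadata` fix above lowercases header names before matching the `x-ms-meta-` prefix, so blob metadata survives transports that normalize header casing. A standalone illustration of the same filter (the sample headers are made up):

```python
# Case-insensitive metadata extraction, mirroring the updated filter: the
# 'x-ms-meta-' prefix is 10 characters long, hence k[10:].
headers = {
    "X-Ms-Meta-Owner": "data-team",
    "Content-Length": "42",
    "x-ms-meta-env": "prod",
}
raw_metadata = {k: v for k, v in headers.items() if k.lower().startswith("x-ms-meta-")}
metadata = {k[10:]: v for k, v in raw_metadata.items()}
print(metadata)  # {'Owner': 'data-team', 'env': 'prod'}
```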
@@ -281,7 +281,7 @@ class BlockBlobChunkUploader(_ChunkUploader):
         return block_id
 
 
-class PageBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class PageBlobChunkUploader(_ChunkUploader):
 
     def _is_chunk_empty(self, chunk_data):
         # read until non-zero byte is encountered
@@ -312,7 +312,7 @@ class PageBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
         pass
 
 
-class AppendBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class AppendBlobChunkUploader(_ChunkUploader):
 
     def __init__(self, *args, **kwargs):
         super(AppendBlobChunkUploader, self).__init__(*args, **kwargs)
@@ -345,7 +345,7 @@ class AppendBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
         pass
 
 
-class DataLakeFileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class DataLakeFileChunkUploader(_ChunkUploader):
 
     def _upload_chunk(self, chunk_offset, chunk_data):
         # avoid uploading the empty pages
@@ -377,7 +377,7 @@ class DataLakeFileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
             block_stream.close()
 
 
-class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class FileChunkUploader(_ChunkUploader):
 
     def _upload_chunk(self, chunk_offset, chunk_data):
         length = len(chunk_data)
@@ -306,7 +306,7 @@ class BlockBlobChunkUploader(_ChunkUploader):
         return block_id
 
 
-class PageBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class PageBlobChunkUploader(_ChunkUploader):
 
     def _is_chunk_empty(self, chunk_data):
         # read until non-zero byte is encountered
@@ -339,7 +339,7 @@ class PageBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
         pass
 
 
-class AppendBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class AppendBlobChunkUploader(_ChunkUploader):
 
     def __init__(self, *args, **kwargs):
         super(AppendBlobChunkUploader, self).__init__(*args, **kwargs)
@@ -370,7 +370,7 @@ class AppendBlobChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
         pass
 
 
-class DataLakeFileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class DataLakeFileChunkUploader(_ChunkUploader):
 
     async def _upload_chunk(self, chunk_offset, chunk_data):
         self.response_headers = await self.service.append_data(
@@ -401,7 +401,7 @@ class DataLakeFileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
             block_stream.close()
 
 
-class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method
+class FileChunkUploader(_ChunkUploader):
 
     async def _upload_chunk(self, chunk_offset, chunk_data):
         length = len(chunk_data)
@@ -281,7 +281,6 @@ class _BlobSharedAccessHelper(_SharedAccessHelper):
         return return_value + '\n'
 
     def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None):
-        # pylint: disable = no-member
         if path[0] != '/':
             path = '/' + path
 
azure/storage/blob/_version.py CHANGED
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-# pylint: disable=too-many-lines,
+# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only
 
 import warnings
 from datetime import datetime
@@ -184,7 +184,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot)
         super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
         self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline)
-        self._client._config.version = get_api_version(kwargs)  # type: ignore [assignment]
+        self._client._config.version = get_api_version(kwargs)  # type: ignore [assignment]
         self._configure_encryption(kwargs)
 
     def _format_url(self, hostname: str) -> str:
@@ -310,7 +310,12 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
             process_storage_error(error)
 
     @distributed_trace_async
-    async def upload_blob_from_url(self, source_url: str, **kwargs: Any) -> Dict[str, Any]:
+    async def upload_blob_from_url(
+        self, source_url: str,
+        *,
+        metadata: Optional[Dict[str, str]] = None,
+        **kwargs: Any
+    ) -> Dict[str, Any]:
         """
         Creates a new Block Blob where the content of the blob is read from a given URL.
         The content of an existing blob is overwritten with the new blob.
@@ -327,6 +332,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
             https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
 
             https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken
+        :keyword dict(str, str) metadata:
+            Name-value pairs associated with the blob as metadata.
         :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
             If True, upload_blob will overwrite the existing data. If set to False, the
             operation will fail with ResourceExistsError.
@@ -412,6 +419,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
             raise ValueError("Customer provided encryption key must be used over HTTPS.")
         options = _upload_blob_from_url_options(
             source_url=source_url,
+            metadata=metadata,
             **kwargs)
         try:
             return cast(Dict[str, Any], await self._client.block_blob.put_blob_from_url(**options))
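
Taken together, the three hunks above add a keyword-only `metadata` parameter to the async `upload_blob_from_url` and thread it through to `put_blob_from_url`. A hedged usage sketch (the account, container, blob names and the SAS placeholder are hypothetical):

```python
# Copy a blob from a source URL and attach metadata in the same call, using the
# new keyword-only `metadata` parameter shown in the diff above.
import asyncio
import os

from azure.storage.blob.aio import BlobClient


async def copy_with_metadata() -> None:
    async with BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],
        container_name="backups",          # placeholder
        blob_name="copy-of-source.bin",    # placeholder
    ) as blob:
        await blob.upload_blob_from_url(
            "https://otheraccount.blob.core.windows.net/src/source.bin?<sas-token>",
            metadata={"origin": "otheraccount", "job": "nightly"},
            overwrite=True,
        )


asyncio.run(copy_with_metadata())
```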
@@ -522,8 +530,9 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
            value specified in this header, the request will fail with
            MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
        :keyword int max_concurrency:
-            Maximum number of parallel connections to use when the blob
-
+            Maximum number of parallel connections to use when transferring the blob in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
            Encrypts the data on the service-side with the given key.
            Use of customer-provided keys must be done over HTTPS.
@@ -679,7 +688,9 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
            As the encryption key itself is provided in the request,
            a secure connection must be established to transfer the key.
        :keyword int max_concurrency:
-
+            Maximum number of parallel connections to use when transferring the blob in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
        :keyword str encoding:
            Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
        :keyword progress_hook:
@@ -1117,6 +1128,9 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         .. versionadded:: 12.10.0
             This was introduced in API version '2020-10-02'.
 
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to check if it exists.
         :keyword int timeout:
             Sets the server-side timeout for the operation in seconds. For more details see
             https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1127,10 +1141,11 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         :rtype: Dict[str, str]
         """
 
+        version_id = get_version_id(self.version_id, kwargs)
         kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time
         kwargs['immutability_policy_mode'] = immutability_policy.policy_mode
-        return cast(Dict[str, str],
-
+        return cast(Dict[str, str], await self._client.blob.set_immutability_policy(
+            cls=return_response_headers,version_id=version_id, **kwargs))
 
     @distributed_trace_async
     async def delete_immutability_policy(self, **kwargs: Any) -> None:
@@ -1139,6 +1154,9 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         .. versionadded:: 12.10.0
             This operation was introduced in API version '2020-10-02'.
 
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to check if it exists.
         :keyword int timeout:
             Sets the server-side timeout for the operation in seconds. For more details see
             https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1149,7 +1167,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         :rtype: Dict[str, str]
         """
 
-
+        version_id = get_version_id(self.version_id, kwargs)
+        await self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs)
 
     @distributed_trace_async
     async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]:
@@ -1160,6 +1179,9 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
 
         :param bool legal_hold:
             Specified if a legal hold should be set on the blob.
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to check if it exists.
         :keyword int timeout:
             Sets the server-side timeout for the operation in seconds. For more details see
             https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
@@ -1170,8 +1192,9 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         :rtype: Dict[str, Union[str, datetime, bool]]
         """
 
-
-
+        version_id = get_version_id(self.version_id, kwargs)
+        return cast(Dict[str, Union[str, datetime, bool]], await self._client.blob.set_legal_hold(
+            legal_hold, version_id=version_id, cls=return_response_headers, **kwargs))
 
     @distributed_trace_async
     async def create_page_blob(
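
The block of hunks above threads an optional `version_id` through `set_immutability_policy`, `delete_immutability_policy`, and `set_legal_hold` on the async client, so those calls can target a specific blob version rather than the current one. A hedged sketch, assuming a versioning-enabled account with version-level immutability and placeholder names/version strings:

```python
# Apply an unlocked immutability policy and a legal hold to one specific blob
# version via the new version_id keyword.
import asyncio
import os
from datetime import datetime, timedelta, timezone

from azure.storage.blob import ImmutabilityPolicy
from azure.storage.blob.aio import BlobClient


async def lock_old_version() -> None:
    async with BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],
        container_name="records",    # placeholder
        blob_name="ledger.json",     # placeholder
    ) as blob:
        policy = ImmutabilityPolicy(
            expiry_time=datetime.now(timezone.utc) + timedelta(days=7),
            policy_mode="Unlocked",
        )
        version = "2024-01-01T00:00:00.0000000Z"  # placeholder version id
        await blob.set_immutability_policy(policy, version_id=version)
        await blob.set_legal_hold(True, version_id=version)


asyncio.run(lock_old_version())
```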
@@ -2216,7 +2239,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs)
         try:
             _, tags = await self._client.blob.get_tags(**options)
-            return cast(Dict[str, str], parse_tags(tags))
+            return cast(Dict[str, str], parse_tags(tags))
         except HttpResponseError as error:
             process_storage_error(error)
 
@@ -3183,7 +3206,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
                 self._pipeline._impl_policies)  # pylint: disable = protected-access
             )
         else:
-            _pipeline = self._pipeline
+            _pipeline = self._pipeline
         return ContainerClient(
             f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name,
             credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-# pylint: disable=
+# pylint: disable=docstring-keyword-should-match-keyword-only
 
 import functools
 import warnings
@@ -58,7 +58,11 @@ if TYPE_CHECKING:
     from .._shared.models import UserDelegationKey
 
 
-class BlobServiceClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
+class BlobServiceClient(  # type: ignore [misc]
+    AsyncStorageAccountHostsMixin,
+    StorageAccountHostsMixin,
+    StorageEncryptionMixin
+):
     """A client to interact with the Blob Service at the account level.
 
     This client provides operations to retrieve and configure the account properties
@@ -132,7 +136,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         self._query_str, credential = self._format_query_string(sas_token, credential)
         super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
         self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline)
-        self._client._config.version = get_api_version(kwargs)  # type: ignore [assignment]
+        self._client._config.version = get_api_version(kwargs)  # type: ignore [assignment]
         self._configure_encryption(kwargs)
 
     def _format_url(self, hostname):
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-# pylint: disable=too-many-lines,
+# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only
 
 import functools
 import warnings
@@ -597,7 +597,7 @@ class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         mod_conditions = get_modify_conditions(kwargs)
         timeout = kwargs.pop('timeout', None)
         try:
-            return await self._client.container.set_metadata(
+            return await self._client.container.set_metadata(  # type: ignore
                 timeout=timeout,
                 lease_access_conditions=access_conditions,
                 modified_access_conditions=mod_conditions,
@@ -608,7 +608,7 @@ class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
             process_storage_error(error)
 
     @distributed_trace
-    def _get_blob_service_client(self) -> "BlobServiceClient":
+    def _get_blob_service_client(self) -> "BlobServiceClient":
         """Get a client to interact with the container's parent service account.
 
         Defaults to current container's credentials.
@@ -632,7 +632,7 @@ class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
                 policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access
             )
         else:
-            _pipeline = self._pipeline
+            _pipeline = self._pipeline
         return BlobServiceClient(
             f"{self.scheme}://{self.primary_hostname}",
             credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
@@ -1406,6 +1406,8 @@ class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         """
         if len(blobs) == 0:
             return AsyncList([])
+        if self._is_localhost:
+            kwargs['url_prepend'] = self.account_name
 
         reqs, options = _generate_delete_blobs_options(
             self._query_str,
@@ -1485,6 +1487,8 @@ class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         :return: An async iterator of responses, one for each blob in order
         :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse]
         """
+        if self._is_localhost:
+            kwargs['url_prepend'] = self.account_name
         reqs, options = _generate_set_tiers_options(
             self._query_str,
             self.container_name,
@@ -1544,6 +1548,8 @@ class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin):
         :return: An async iterator of responses, one for each blob in order
         :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse]
         """
+        if self._is_localhost:
+            kwargs['url_prepend'] = self.account_name
         reqs, options = _generate_set_tiers_options(
             self._query_str,
             self.container_name,
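
The three `_is_localhost` hunks above make the batch helpers prepend the account name to sub-request URLs when the client points at a local emulator, where the account lives in the URL path rather than in the hostname. A minimal sketch against Azurite (assumes Azurite is listening on 127.0.0.1:10000 with the standard development account; the container and blob names are placeholders):

```python
# Batch-delete several blobs through a ContainerClient built from the standard
# Azurite development-storage connection string; the new url_prepend branch
# keeps the batch sub-request URLs valid for path-style (localhost) accounts.
import asyncio

from azure.storage.blob.aio import ContainerClient

AZURITE_CONN_STR = (
    "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;"
    "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;"
    "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
)


async def batch_delete_on_azurite() -> None:
    async with ContainerClient.from_connection_string(
        AZURITE_CONN_STR, container_name="scratch"  # placeholder container
    ) as container:
        await container.delete_blobs("a.txt", "b.txt", "c.txt")  # placeholder blobs


asyncio.run(batch_delete_on_azurite())
```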
@@ -19,7 +19,7 @@ from typing import (
     Tuple, TypeVar, Union, TYPE_CHECKING
 )
 
-from azure.core.exceptions import HttpResponseError
+from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError
 
 from .._shared.request_handlers import validate_and_format_range_headers
 from .._shared.response_handlers import parse_length_from_content_range, process_storage_error
@@ -46,7 +46,8 @@ T = TypeVar('T', bytes, str)
 async def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes:
     if data is None:
         raise ValueError("Response cannot be None.")
-
+    await data.response.read()
+    content = cast(bytes, data.response.content)
     if encryption.get('key') is not None or encryption.get('resolver') is not None:
         try:
             return decrypt_blob(
@@ -120,20 +121,30 @@ class _AsyncChunkDownloader(_ChunkDownloader):
             download_range[1],
             check_content_md5=self.validate_content
         )
-        try:
-            _, response = await cast(Awaitable[Any], self.client.download(
-                range=range_header,
-                range_get_content_md5=range_validation,
-                validate_content=self.validate_content,
-                data_stream_total=self.total_size,
-                download_stream_current=self.progress_total,
-                **self.request_options
-            ))
 
-
-
+        retry_active = True
+        retry_total = 3
+        while retry_active:
+            try:
+                _, response = await cast(Awaitable[Any], self.client.download(
+                    range=range_header,
+                    range_get_content_md5=range_validation,
+                    validate_content=self.validate_content,
+                    data_stream_total=self.total_size,
+                    download_stream_current=self.progress_total,
+                    **self.request_options
+                ))
+            except HttpResponseError as error:
+                process_storage_error(error)
 
-
+            try:
+                chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options)
+                retry_active = False
+            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
+                retry_total -= 1
+                if retry_total <= 0:
+                    raise HttpResponseError(error, error=error) from error
+                await asyncio.sleep(1)
         content_length = response.content_length
 
         # This makes sure that if_match is set so that we can validate
@@ -342,66 +353,78 @@ class StorageStreamDownloader(Generic[T]):  # pylint: disable=too-many-instance-attributes
             self._initial_range[1],
             start_range_required=False,
             end_range_required=False,
-            check_content_md5=self._validate_content
+            check_content_md5=self._validate_content
+        )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-        # Parse the total file size and adjust the download size if ranges
-        # were specified
-        self._file_size = parse_length_from_content_range(response.properties.content_range)
-        if self._file_size is None:
-            raise ValueError("Required Content-Range response header is missing or malformed.")
-        # Remove any extra encryption data size from blob size
-        self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data)
-
-        if self._end_range is not None and self._start_range is not None:
-            # Use the length unless it is over the end of the file
-            self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1)
-        elif self._start_range is not None:
-            self.size = self._file_size - self._start_range
-        else:
-            self.size = self._file_size
+        retry_active = True
+        retry_total = 3
+        while retry_active:
+            try:
+                location_mode, response = cast(Tuple[Optional[str], Any], await self._clients.blob.download(
+                    range=range_header,
+                    range_get_content_md5=range_validation,
+                    validate_content=self._validate_content,
+                    data_stream_total=None,
+                    download_stream_current=0,
+                    **self._request_options
+                ))
 
-
-
-
-
-        #
-
-
-
-
-
-
-
-
-
-
-        self.
-
-
-
+                # Check the location we read from to ensure we use the same one
+                # for subsequent requests.
+                self._location_mode = location_mode
+
+                # Parse the total file size and adjust the download size if ranges
+                # were specified
+                self._file_size = parse_length_from_content_range(response.properties.content_range)
+                if self._file_size is None:
+                    raise ValueError("Required Content-Range response header is missing or malformed.")
+                # Remove any extra encryption data size from blob size
+                self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data)
+
+                if self._end_range is not None and self._start_range is not None:
+                    # Use the length unless it is over the end of the file
+                    self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1)
+                elif self._start_range is not None:
+                    self.size = self._file_size - self._start_range
+                else:
+                    self.size = self._file_size
 
-
-
-
-
-
-
-
-
-
+            except HttpResponseError as error:
+                if self._start_range is None and error.response and error.status_code == 416:
+                    # Get range will fail on an empty file. If the user did not
+                    # request a range, do a regular get request in order to get
+                    # any properties.
+                    try:
+                        _, response = cast(Tuple[Optional[Any], Any], await self._clients.blob.download(
+                            validate_content=self._validate_content,
+                            data_stream_total=0,
+                            download_stream_current=0,
+                            **self._request_options))
+                    except HttpResponseError as e:
+                        process_storage_error(e)
+
+                    # Set the download size to empty
+                    self.size = 0
+                    self._file_size = 0
+                else:
+                    process_storage_error(error)
+
+            try:
+                if self.size == 0:
+                    self._current_content = b""
+                else:
+                    self._current_content = await process_content(
+                        response,
+                        self._initial_offset[0],
+                        self._initial_offset[1],
+                        self._encryption_options
+                    )
+                retry_active = False
+            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
+                retry_total -= 1
+                if retry_total <= 0:
+                    raise HttpResponseError(error, error=error) from error
+                await asyncio.sleep(1)
         self._download_offset += len(self._current_content)
         self._raw_download_offset += response.content_length
 
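
The two large hunks above wrap the async downloader's initial request and per-chunk content reads in a bounded retry loop: a transient `IncompleteReadError`, `DecodeError`, or `HttpResponseError` while reading the body is retried up to three times, with a one-second pause, before surfacing as an `HttpResponseError`. Nothing changes for callers; a typical chunked download such as the hedged sketch below simply benefits from the extra resilience (the names are placeholders):

```python
# Stream a large blob to disk in chunks; transient body-read failures inside
# each chunk request are now retried by the downloader itself.
import asyncio
import os

from azure.storage.blob.aio import BlobClient


async def download_large_blob() -> None:
    async with BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],
        container_name="videos",       # placeholder
        blob_name="big-file.bin",      # placeholder
    ) as blob:
        stream = await blob.download_blob(max_concurrency=4, validate_content=True)
        with open("big-file.bin", "wb") as handle:
            async for chunk in stream.chunks():
                handle.write(chunk)


asyncio.run(download_large_blob())
```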
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-# pylint: disable=
+# pylint: disable=docstring-keyword-should-match-keyword-only
 
 import uuid
 from typing import Any, Optional, Union, TYPE_CHECKING
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
 # -------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for
@@ -102,7 +101,7 @@ class BlobPropertiesPaged(AsyncPageIterator):
         if isinstance(item, BlobProperties):
             return item
         if isinstance(item, BlobItemInternal):
-            blob = get_blob_properties_from_generated_code(item)
+            blob = get_blob_properties_from_generated_code(item)
             blob.container = self.container  # type: ignore [assignment]
             return blob
         return item
@@ -3,8 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-# pylint: disable=too-few-public-methods
-# pylint: disable=super-init-not-called, too-many-lines
+# pylint: disable=too-few-public-methods
 
 from typing import Callable, List, Optional, TYPE_CHECKING
 