azure-storage-blob 12.19.1__py3-none-any.whl → 12.20.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azure/storage/blob/__init__.py +7 -5
- azure/storage/blob/_blob_client.py +12 -4
- azure/storage/blob/_blob_service_client.py +4 -3
- azure/storage/blob/_container_client.py +28 -12
- azure/storage/blob/_download.py +3 -3
- azure/storage/blob/_encryption.py +254 -165
- azure/storage/blob/_generated/_azure_blob_storage.py +21 -3
- azure/storage/blob/_generated/_configuration.py +4 -11
- azure/storage/blob/_generated/_serialization.py +41 -49
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +23 -3
- azure/storage/blob/_generated/aio/_configuration.py +4 -11
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +24 -58
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +123 -306
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +37 -86
- azure/storage/blob/_generated/aio/operations/_container_operations.py +98 -289
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +51 -150
- azure/storage/blob/_generated/aio/operations/_service_operations.py +49 -125
- azure/storage/blob/_generated/models/_models_py3.py +31 -31
- azure/storage/blob/_generated/operations/_append_blob_operations.py +25 -59
- azure/storage/blob/_generated/operations/_blob_operations.py +123 -306
- azure/storage/blob/_generated/operations/_block_blob_operations.py +39 -88
- azure/storage/blob/_generated/operations/_container_operations.py +100 -291
- azure/storage/blob/_generated/operations/_page_blob_operations.py +52 -151
- azure/storage/blob/_generated/operations/_service_operations.py +50 -126
- azure/storage/blob/_models.py +3 -4
- azure/storage/blob/_serialize.py +1 -0
- azure/storage/blob/_shared/authentication.py +1 -1
- azure/storage/blob/_shared/avro/avro_io.py +0 -6
- azure/storage/blob/_shared/avro/avro_io_async.py +0 -6
- azure/storage/blob/_shared/avro/datafile.py +0 -4
- azure/storage/blob/_shared/avro/datafile_async.py +0 -4
- azure/storage/blob/_shared/avro/schema.py +4 -4
- azure/storage/blob/_shared/base_client.py +72 -87
- azure/storage/blob/_shared/base_client_async.py +115 -27
- azure/storage/blob/_shared/models.py +112 -20
- azure/storage/blob/_shared/parser.py +7 -6
- azure/storage/blob/_shared/policies.py +96 -66
- azure/storage/blob/_shared/policies_async.py +48 -21
- azure/storage/blob/_shared/response_handlers.py +14 -16
- azure/storage/blob/_shared/shared_access_signature.py +2 -3
- azure/storage/blob/_shared_access_signature.py +37 -27
- azure/storage/blob/_upload_helpers.py +4 -7
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +2 -2
- azure/storage/blob/aio/_blob_client_async.py +16 -5
- azure/storage/blob/aio/_blob_service_client_async.py +3 -1
- azure/storage/blob/aio/_container_client_async.py +25 -8
- azure/storage/blob/aio/_download_async.py +9 -9
- azure/storage/blob/aio/_encryption_async.py +72 -0
- azure/storage/blob/aio/_upload_helpers.py +8 -10
- {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0b1.dist-info}/METADATA +9 -9
- azure_storage_blob-12.20.0b1.dist-info/RECORD +81 -0
- {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0b1.dist-info}/WHEEL +1 -1
- azure/storage/blob/_generated/py.typed +0 -1
- azure_storage_blob-12.19.1.dist-info/RECORD +0 -81
- {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0b1.dist-info}/LICENSE +0 -0
- {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0b1.dist-info}/top_level.txt +0 -0
@@ -6,12 +6,12 @@
 # pylint: disable=invalid-overridden-method
 
 import asyncio
-import random
 import logging
-
+import random
+from typing import Any, Dict, TYPE_CHECKING
 
-from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
 from azure.core.exceptions import AzureError
+from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
 
 from .authentication import StorageHttpChallenge
 from .constants import DEFAULT_OAUTH_SCOPE
@@ -19,7 +19,10 @@ from .policies import is_retry, StorageRetryPolicy
 
 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential
-    from azure.core.pipeline import
+    from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import
+        PipelineRequest,
+        PipelineResponse
+    )
 
 
 _LOGGER = logging.getLogger(__name__)
@@ -45,8 +48,7 @@ class AsyncStorageResponseHook(AsyncHTTPPolicy):
         self._response_callback = kwargs.get('raw_response_hook')
         super(AsyncStorageResponseHook, self).__init__()
 
-    async def send(self, request):
-        # type: (PipelineRequest) -> PipelineResponse
+    async def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
         data_stream_total = request.context.get('data_stream_total')
         if data_stream_total is None:
@@ -80,12 +82,13 @@ class AsyncStorageResponseHook(AsyncHTTPPolicy):
         elif should_update_counts and upload_stream_current is not None:
             upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
         for pipeline_obj in [request, response]:
-            pipeline_obj
-
-
+            if hasattr(pipeline_obj, 'context'):
+                pipeline_obj.context['data_stream_total'] = data_stream_total
+                pipeline_obj.context['download_stream_current'] = download_stream_current
+                pipeline_obj.context['upload_stream_current'] = upload_stream_current
         if response_callback:
             if asyncio.iscoroutine(response_callback):
-                await response_callback(response)
+                await response_callback(response) # type: ignore
             else:
                 response_callback(response)
         request.context['response_callback'] = response_callback
@@ -144,9 +147,23 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
 class ExponentialRetry(AsyncStorageRetryPolicy):
     """Exponential retry."""
 
-
-
-
+    initial_backoff: int
+    """The initial backoff interval, in seconds, for the first retry."""
+    increment_base: int
+    """The base, in seconds, to increment the initial_backoff by after the
+    first retry."""
+    random_jitter_range: int
+    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
+
+    def __init__(
+        self,
+        initial_backoff: int = 15,
+        increment_base: int = 3,
+        retry_total: int = 3,
+        retry_to_secondary: bool = False,
+        random_jitter_range: int = 3, **kwargs
+    ) -> None:
+        """
         Constructs an Exponential retry object. The initial_backoff is used for
         the first retry. Subsequent retries are retried after initial_backoff +
         increment_power^retry_count seconds. For example, by default the first retry
@@ -167,18 +184,18 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         :param int random_jitter_range:
             A number in seconds which indicates a range to jitter/randomize for the back-off interval.
            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
-
+        """
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
         super(ExponentialRetry, self).__init__(
             retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
 
-    def get_backoff_time(self, settings):
+    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.
 
-        :param
+        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
@@ -194,7 +211,18 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
 class LinearRetry(AsyncStorageRetryPolicy):
     """Linear retry."""
 
-
+    initial_backoff: int
+    """The backoff interval, in seconds, between retries."""
+    random_jitter_range: int
+    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
+
+    def __init__(
+        self, backoff: int = 15,
+        retry_total: int = 3,
+        retry_to_secondary: bool = False,
+        random_jitter_range: int = 3,
+        **kwargs: Any
+    ) -> None:
         """
         Constructs a Linear retry object.
 
@@ -215,11 +243,11 @@ class LinearRetry(AsyncStorageRetryPolicy):
         super(LinearRetry, self).__init__(
             retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
 
-    def get_backoff_time(self, settings):
+    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.
 
-        :param
+        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
@@ -240,8 +268,7 @@ class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
     def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None:
         super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)
 
-    async def on_challenge(self, request, response):
-        # type: (PipelineRequest, PipelineResponse) -> bool
+    async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool:
         try:
             auth_header = response.http_response.headers.get("WWW-Authenticate")
             challenge = StorageHttpChallenge(auth_header)
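The ExponentialRetry and LinearRetry changes above only move the constructor arguments into typed signatures; runtime behavior is unchanged. A minimal usage sketch, not taken from the package: it assumes the async policy is importable from azure.storage.blob._shared.policies_async (the module these hunks modify) and that the aio clients accept a pre-built policy through the retry_policy keyword, which is how the shared base client configuration wires retries.

import asyncio
from azure.storage.blob.aio import BlobServiceClient
from azure.storage.blob._shared.policies_async import ExponentialRetry

async def main():
    # Per the docstring above: sleep is roughly initial_backoff + increment_base**retry_count,
    # jittered by +/- random_jitter_range seconds.
    retry = ExponentialRetry(initial_backoff=10, increment_base=2, retry_total=5, random_jitter_range=2)
    async with BlobServiceClient(
        account_url="https://<account>.blob.core.windows.net",
        credential="<account key or SAS>",
        retry_policy=retry,  # assumption: overrides the default ExponentialRetry built from **kwargs
    ) as service:
        print(await service.get_service_properties())

asyncio.run(main())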
@@ -3,26 +3,22 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-from typing import NoReturn, TYPE_CHECKING
 import logging
+from typing import NoReturn
 from xml.etree.ElementTree import Element
 
-from azure.core.pipeline.policies import ContentDecodePolicy
 from azure.core.exceptions import (
+    ClientAuthenticationError,
+    DecodeError,
     HttpResponseError,
-    ResourceNotFoundError,
-    ResourceModifiedError,
     ResourceExistsError,
-
-
+    ResourceModifiedError,
+    ResourceNotFoundError,
+)
+from azure.core.pipeline.policies import ContentDecodePolicy
 
-from .parser import _to_utc_datetime
 from .models import StorageErrorCode, UserDelegationKey, get_enum_value
-
-
-if TYPE_CHECKING:
-    from datetime import datetime
-    from azure.core.exceptions import AzureError
+from .parser import _to_utc_datetime
 
 
 _LOGGER = logging.getLogger(__name__)
@@ -85,10 +81,10 @@ def return_raw_deserialized(response, *_):
     return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME]
 
 
-def process_storage_error(storage_error) -> NoReturn:
+def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements
     raise_error = HttpResponseError
     serialized = False
-    if not storage_error.response:
+    if not storage_error.response or storage_error.response.status_code in [200, 204]:
         raise storage_error
     # If it is one of those three then it has been serialized prior by the generated layer.
     if isinstance(storage_error, (PartialBatchErrorException,
@@ -101,7 +97,8 @@ def process_storage_error(storage_error) -> NoReturn: # pylint:disable=too-many
     try:
         error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response)
         try:
-            error_body
+            if error_body is None or len(error_body) == 0:
+                error_body = storage_error.response.reason
         except AttributeError:
             error_body = ''
         # If it is an XML response
@@ -119,7 +116,8 @@ def process_storage_error(storage_error) -> NoReturn: # pylint:disable=too-many
         error_dict = {'message': str(error_body)}
 
     # If we extracted from a Json or XML response
-
+    # There is a chance error_dict is just a string
+    if error_dict and isinstance(error_dict, dict):
         error_code = error_dict.get('code')
         error_message = error_dict.get('message')
         additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}}
@@ -136,9 +136,8 @@ class SharedAccessSignature(object):
         :param start:
             The time at which the shared access signature becomes valid. If
             omitted, start time for this call is assumed to be the time when the
-            storage service receives the request.
-
-            be UTC.
+            storage service receives the request. The provided datetime will always
+            be interpreted as UTC.
         :type start: datetime or str
         :param str ip:
             Specifies an IP address or a range of IP addresses from which to accept requests.
@@ -7,6 +7,7 @@
 from typing import ( # pylint: disable=unused-import
     Union, Optional, Any, TYPE_CHECKING
 )
+from urllib.parse import parse_qs
 
 from ._shared import sign_string, url_quote
 from ._shared.constants import X_MS_VERSION
@@ -90,9 +91,8 @@ class BlobSharedAccessSignature(SharedAccessSignature):
         :param start:
             The time at which the shared access signature becomes valid. If
             omitted, start time for this call is assumed to be the time when the
-            storage service receives the request.
-
-            be UTC.
+            storage service receives the request. The provided datetime will always
+            be interpreted as UTC.
         :type start: datetime or str
         :param str policy_id:
             A unique value up to 64 characters in length that correlates to a
@@ -177,9 +177,8 @@ class BlobSharedAccessSignature(SharedAccessSignature):
         :param start:
             The time at which the shared access signature becomes valid. If
             omitted, start time for this call is assumed to be the time when the
-            storage service receives the request.
-
-            be UTC.
+            storage service receives the request. The provided datetime will always
+            be interpreted as UTC.
         :type start: datetime or str
         :param str policy_id:
             A unique value up to 64 characters in length that correlates to a
@@ -307,15 +306,17 @@ class _BlobSharedAccessHelper(_SharedAccessHelper):
 
 
 def generate_account_sas(
-
-
-
-
-
-
-
-
-
+    account_name: str,
+    account_key: str,
+    resource_types: Union["ResourceTypes", str],
+    permission: Union["AccountSasPermissions", str],
+    expiry: Union["datetime", str],
+    start: Optional[Union["datetime", str]] = None,
+    ip: Optional[str] = None,
+    *,
+    services: Union[Services, str] = Services(blob=True),
+    **kwargs: Any
+) -> str:
     """Generates a shared access signature for the blob service.
 
     Use the returned signature with the credential parameter of any BlobServiceClient,
@@ -334,15 +335,13 @@ def generate_account_sas(
     :type permission: str or ~azure.storage.blob.AccountSasPermissions
     :param expiry:
         The time at which the shared access signature becomes invalid.
-
-        without timezone info, it is assumed to be UTC.
+        The provided datetime will always be interpreted as UTC.
     :type expiry: ~datetime.datetime or str
     :param start:
         The time at which the shared access signature becomes valid. If
         omitted, start time for this call is assumed to be the time when the
-        storage service receives the request.
-
-        be UTC.
+        storage service receives the request. The provided datetime will always
+        be interpreted as UTC.
     :type start: ~datetime.datetime or str
     :param str ip:
         Specifies an IP address or a range of IP addresses from which to accept requests.
@@ -350,6 +349,9 @@ def generate_account_sas(
        or address range specified on the SAS token, the request is not authenticated.
        For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
        restricts the request to those IP addresses.
+    :keyword Union[Services, str] services:
+        Specifies the services that the Shared Access Signature (sas) token will be able to be utilized with.
+        Will default to only this package (i.e. blobs) if not provided.
     :keyword str protocol:
        Specifies the protocol permitted for a request made. The default value is https.
     :keyword str encryption_scope:
@@ -368,7 +370,7 @@ def generate_account_sas(
     """
     sas = SharedAccessSignature(account_name, account_key)
     return sas.generate_account(
-        services=
+        services=services,
         resource_types=resource_types,
         permission=permission,
         expiry=expiry,
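A hedged sketch of calling generate_account_sas with the keyword-only services argument introduced above. It assumes Services, ResourceTypes and AccountSasPermissions are exported from azure.storage.blob (Services itself is defined in the shared models touched in this release); the account name and key are placeholders.

from datetime import datetime, timedelta
from azure.storage.blob import (
    AccountSasPermissions, ResourceTypes, Services, generate_account_sas
)

sas_token = generate_account_sas(
    account_name="<account>",
    account_key="<account key>",
    resource_types=ResourceTypes(service=True, container=True, object=True),
    permission=AccountSasPermissions(read=True, list=True),
    expiry=datetime.utcnow() + timedelta(hours=1),
    services=Services(blob=True),  # new keyword; omitting it keeps the blob-only default
)
print(sas_token)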
@@ -427,9 +429,8 @@ def generate_container_sas(
     :param start:
         The time at which the shared access signature becomes valid. If
         omitted, start time for this call is assumed to be the time when the
-        storage service receives the request.
-
-        be UTC.
+        storage service receives the request. The provided datetime will always
+        be interpreted as UTC.
     :type start: ~datetime.datetime or str
     :param str policy_id:
         A unique value up to 64 characters in length that correlates to a
@@ -554,9 +555,8 @@ def generate_blob_sas(
     :param start:
         The time at which the shared access signature becomes valid. If
         omitted, start time for this call is assumed to be the time when the
-        storage service receives the request.
-
-        be UTC.
+        storage service receives the request. The provided datetime will always
+        be interpreted as UTC.
     :type start: ~datetime.datetime or str
     :param str policy_id:
         A unique value up to 64 characters in length that correlates to a
@@ -629,3 +629,13 @@ def generate_blob_sas(
         ip=ip,
         **kwargs
     )
+
+def _is_credential_sastoken(credential: Any) -> bool:
+    if not credential or not isinstance(credential, str):
+        return False
+
+    sas_values = QueryStringConstants.to_list()
+    parsed_query = parse_qs(credential.lstrip("?"))
+    if parsed_query and all(k in sas_values for k in parsed_query):
+        return True
+    return False
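The new _is_credential_sastoken helper above treats a string credential as a SAS token when every key in its query string is a known SAS parameter. A standalone illustration of that check; the key names below are a hand-picked, illustrative subset, whereas the helper reads the full list from QueryStringConstants.to_list().

from urllib.parse import parse_qs

candidate = "?sv=2025-01-05&ss=b&srt=sco&sp=rl&se=2025-06-01T00%3A00%3A00Z&sig=abc123"  # fake values

known_sas_keys = {"sv", "ss", "srt", "sp", "se", "st", "sip", "spr", "sig"}  # illustrative subset
parsed = parse_qs(candidate.lstrip("?"))
print(all(key in known_sas_keys for key in parsed))  # True -> the string looks like a SAS token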
@@ -64,7 +64,6 @@ def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disab
 
 def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements
     client=None,
-    data=None,
     stream=None,
     length=None,
     overwrite=None,
@@ -92,12 +91,10 @@ def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements
 
     # Do single put if the size is smaller than or equal config.max_single_put_size
     if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size):
-
-
-
-
-        except AttributeError:
-            pass
+        data = stream.read(length)
+        if not isinstance(data, bytes):
+            raise TypeError('Blob data should be of type bytes.')
+
     if encryption_options.get('key'):
         encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version'])
         headers['x-ms-meta-encryptiondata'] = encryption_data
azure/storage/blob/_version.py CHANGED
@@ -40,7 +40,7 @@ async def upload_blob_to_url(
         - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
         If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
         should be the storage account key.
-    :
+    :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long
     :keyword bool overwrite:
         Whether the blob to be uploaded should overwrite the current data.
         If True, upload_blob_to_url will overwrite any existing data. If set to False, the
@@ -99,7 +99,7 @@ async def download_blob_from_url(
         - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
         If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
         should be the storage account key.
-    :
+    :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long
     :keyword bool overwrite:
         Whether the local file should be overwritten if it already exists. The default value is
         `False` - in which case a ValueError will be raised if the file already exists. If set to
@@ -254,6 +254,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
         :keyword str source_authorization:
             Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
             the prefix of the source_authorization string.
+        :returns: Response from creating a new block blob for a given URL.
+        :rtype: Dict[str, Any]
         """
         options = self._upload_blob_from_url_options(
             source_url=self._encode_source_url(source_url),
@@ -894,6 +896,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
             see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
             #other-client--per-operation-configuration>`_.
         :returns: Blob-updated property dict (Etag and last modified)
+        :rtype: Dict[str, Union[str, datetime]]
         """
         options = self._set_blob_metadata_options(metadata=metadata, **kwargs)
         try:
@@ -1589,13 +1592,14 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
             length=None, # type: Optional[int]
             **kwargs
     ):
-        # type: (...) ->
+        # type: (...) -> Dict[str, Any]
         """Creates a new block to be committed as part of a blob.
 
         :param str block_id: A string value that identifies the block.
            The string should be less than or equal to 64 bytes in size.
            For a given blob, the block_id must be the same size for each block.
         :param data: The blob data.
+        :type data: Union[Iterable[AnyStr], IO[AnyStr]]
         :param int length: Size of the block.
         :keyword bool validate_content:
            If true, calculates an MD5 hash for each chunk of the blob. The storage
@@ -1631,7 +1635,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
            This value is not tracked or validated on the client. To configure client-side network timesouts
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
            #other-client--per-operation-configuration>`_.
-        :
+        :returns: Blob property dict.
+        :rtype: Dict[str, Any]
         """
         options = self._stage_block_options(
             block_id,
@@ -1652,7 +1657,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
             source_content_md5=None, # type: Optional[Union[bytes, bytearray]]
             **kwargs
     ):
-        # type: (...) ->
+        # type: (...) -> Dict[str, Any]
         """Creates a new block to be committed as part of a blob where
         the contents are read from a URL.
 
@@ -1693,7 +1698,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
         :keyword str source_authorization:
            Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
            the prefix of the source_authorization string.
-        :
+        :returns: Blob property dict.
+        :rtype: Dict[str, Any]
         """
         options = self._stage_block_from_url_options(
             block_id,
@@ -2177,7 +2183,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
             .. versionadded:: 12.2.0
                 This operation was introduced in API version '2019-07-07'.
 
-        :param previous_snapshot_url:
+        :param str previous_snapshot_url:
            Specifies the URL of a previous snapshot of the managed disk.
            The response will only contain pages that were changed between the target blob and
            its previous snapshot.
@@ -2558,6 +2564,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
         :keyword str source_authorization:
            Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
            the prefix of the source_authorization string.
+        :returns: Response after uploading pages from specified URL.
+        :rtype: Dict[str, Any]
         """
 
         options = self._upload_pages_from_url_options(
@@ -2654,6 +2662,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
 
         :param data:
            Content of the block.
+        :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]]
         :param int length:
            Size of the block in bytes.
         :keyword bool validate_content:
@@ -2833,6 +2842,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
         :keyword str source_authorization:
            Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
            the prefix of the source_authorization string.
+        :returns: Result after appending a new block.
+        :rtype: Dict[str, Union[str, datetime, int]]
         """
         options = self._append_block_from_url_options(
             copy_source_url=self._encode_source_url(copy_source_url),
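Several BlobClient hunks above add :returns:/:rtype: documentation for the block-staging operations. A minimal, hedged sketch of that flow with the async client; the connection string, container and blob names are placeholders.

import asyncio
from azure.storage.blob import BlobBlock
from azure.storage.blob.aio import BlobClient

async def main():
    blob = BlobClient.from_connection_string("<connection string>", "mycontainer", "staged.bin")
    async with blob:
        # stage_block returns the blob property dict documented above.
        await blob.stage_block(block_id="block-001", data=b"first chunk")
        await blob.stage_block(block_id="block-002", data=b"second chunk")
        # Committing the block list makes the staged blocks the visible blob content.
        await blob.commit_block_list([BlobBlock(block_id="block-001"), BlobBlock(block_id="block-002")])

asyncio.run(main())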
@@ -490,6 +490,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase, St
            This value is not tracked or validated on the client. To configure client-side network timesouts
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
            #other-client--per-operation-configuration>`_.
+        :returns: A container client to interact with the newly created container.
         :rtype: ~azure.storage.blob.aio.ContainerClient
 
         .. admonition:: Example:
@@ -527,7 +528,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase, St
            If specified, delete_container only succeeds if the
            container's lease is active and matches this ID.
            Required if the container has an active lease.
-        :
+        :type lease: ~azure.storage.blob.aio.BlobLeaseClient or str
         :keyword ~datetime.datetime if_modified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
@@ -591,6 +592,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase, St
            This value is not tracked or validated on the client. To configure client-side network timesouts
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
            #other-client--per-operation-configuration>`_.
+        :returns: A container client for the renamed container.
         :rtype: ~azure.storage.blob.ContainerClient
         """
         renamed_container = self.get_container_client(new_name)
@@ -208,6 +208,7 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
            This value is not tracked or validated on the client. To configure client-side network timesouts
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
            #other-client--per-operation-configuration>`_.
+        :returns: The renamed container.
         :rtype: ~azure.storage.blob.ContainerClient
         """
         lease = kwargs.pop('lease', None)
@@ -461,6 +462,7 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
            #other-client--per-operation-configuration>`_.
         :returns: Container-updated property dict (Etag and last modified).
+        :rtype: Dict[str, Union[str, datetime]]
 
         .. admonition:: Example:
 
@@ -647,8 +649,11 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
             process_storage_error(error)
 
     @distributed_trace
-    def list_blobs(
-
+    def list_blobs(
+        self, name_starts_with: Optional[str] = None,
+        include: Optional[Union[str, List[str]]] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged[BlobProperties]:
         """Returns a generator to list the blobs under the specified container.
         The generator will lazily follow the continuation tokens returned by
         the service.
@@ -679,6 +684,10 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
                :dedent: 12
                :caption: List the blobs in the container.
         """
+        if kwargs.pop('prefix', None):
+            raise ValueError("Passing 'prefix' has no effect on filtering, " +
+                             "please use the 'name_starts_with' parameter instead.")
+
         if include and not isinstance(include, list):
             include = [include]
 
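The guard added above makes the old prefix keyword an error, so filtering goes through name_starts_with. A small usage sketch under that assumption; the connection string and container name are placeholders.

import asyncio
from azure.storage.blob.aio import ContainerClient

async def main():
    container = ContainerClient.from_connection_string("<connection string>", "mycontainer")
    async with container:
        # name_starts_with replaces the rejected 'prefix' style of filtering.
        async for blob in container.list_blobs(name_starts_with="logs/2024"):
            print(blob.name)

asyncio.run(main())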
@@ -718,6 +727,10 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
         :returns: An iterable (auto-paging) response of blob names as strings.
         :rtype: ~azure.core.async_paging.AsyncItemPaged[str]
         """
+        if kwargs.pop('prefix', None):
+            raise ValueError("Passing 'prefix' has no effect on filtering, " +
+                             "please use the 'name_starts_with' parameter instead.")
+
         name_starts_with = kwargs.pop('name_starts_with', None)
         results_per_page = kwargs.pop('results_per_page', None)
         timeout = kwargs.pop('timeout', None)
@@ -739,12 +752,11 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
 
     @distributed_trace
     def walk_blobs(
-
-
-
-
-    ):
-        # type: (...) -> AsyncItemPaged[BlobProperties]
+        self, name_starts_with: Optional[str] = None,
+        include: Optional[Union[List[str], str]] = None,
+        delimiter: str = "/",
+        **kwargs: Any
+    ) -> AsyncItemPaged[BlobProperties]:
         """Returns a generator to list the blobs under the specified container.
         The generator will lazily follow the continuation tokens returned by
         the service. This operation will list blobs in accordance with a hierarchy,
@@ -772,6 +784,10 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
         :returns: An iterable (auto-paging) response of BlobProperties.
         :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties]
         """
+        if kwargs.pop('prefix', None):
+            raise ValueError("Passing 'prefix' has no effect on filtering, " +
+                             "please use the 'name_starts_with' parameter instead.")
+
         if include and not isinstance(include, list):
             include = [include]
 
@@ -838,6 +854,7 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase, Storag
 
         :param str name: The blob with which to interact.
         :param data: The blob data to upload.
+        :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]]
         :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
            either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
         :param int length: