azure-storage-blob 12.19.1__py3-none-any.whl → 12.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. azure/storage/blob/__init__.py +17 -5
  2. azure/storage/blob/_blob_client.py +23 -7
  3. azure/storage/blob/_blob_service_client.py +9 -3
  4. azure/storage/blob/_container_client.py +37 -13
  5. azure/storage/blob/_download.py +3 -3
  6. azure/storage/blob/_encryption.py +254 -165
  7. azure/storage/blob/_generated/_azure_blob_storage.py +21 -3
  8. azure/storage/blob/_generated/_configuration.py +4 -11
  9. azure/storage/blob/_generated/_serialization.py +41 -49
  10. azure/storage/blob/_generated/aio/_azure_blob_storage.py +23 -3
  11. azure/storage/blob/_generated/aio/_configuration.py +4 -11
  12. azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +24 -58
  13. azure/storage/blob/_generated/aio/operations/_blob_operations.py +123 -306
  14. azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +37 -86
  15. azure/storage/blob/_generated/aio/operations/_container_operations.py +98 -289
  16. azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +51 -150
  17. azure/storage/blob/_generated/aio/operations/_service_operations.py +49 -125
  18. azure/storage/blob/_generated/models/_models_py3.py +31 -31
  19. azure/storage/blob/_generated/operations/_append_blob_operations.py +25 -59
  20. azure/storage/blob/_generated/operations/_blob_operations.py +123 -306
  21. azure/storage/blob/_generated/operations/_block_blob_operations.py +39 -88
  22. azure/storage/blob/_generated/operations/_container_operations.py +100 -291
  23. azure/storage/blob/_generated/operations/_page_blob_operations.py +52 -151
  24. azure/storage/blob/_generated/operations/_service_operations.py +50 -126
  25. azure/storage/blob/_lease.py +1 -0
  26. azure/storage/blob/_models.py +3 -4
  27. azure/storage/blob/_serialize.py +1 -0
  28. azure/storage/blob/_shared/authentication.py +1 -1
  29. azure/storage/blob/_shared/avro/avro_io.py +0 -6
  30. azure/storage/blob/_shared/avro/avro_io_async.py +0 -6
  31. azure/storage/blob/_shared/avro/datafile.py +0 -4
  32. azure/storage/blob/_shared/avro/datafile_async.py +0 -4
  33. azure/storage/blob/_shared/avro/schema.py +4 -4
  34. azure/storage/blob/_shared/base_client.py +72 -87
  35. azure/storage/blob/_shared/base_client_async.py +115 -27
  36. azure/storage/blob/_shared/models.py +120 -27
  37. azure/storage/blob/_shared/parser.py +7 -6
  38. azure/storage/blob/_shared/policies.py +96 -66
  39. azure/storage/blob/_shared/policies_async.py +48 -21
  40. azure/storage/blob/_shared/response_handlers.py +14 -16
  41. azure/storage/blob/_shared/shared_access_signature.py +3 -3
  42. azure/storage/blob/_shared_access_signature.py +38 -27
  43. azure/storage/blob/_upload_helpers.py +4 -7
  44. azure/storage/blob/_version.py +1 -1
  45. azure/storage/blob/aio/__init__.py +13 -4
  46. azure/storage/blob/aio/_blob_client_async.py +17 -6
  47. azure/storage/blob/aio/_blob_service_client_async.py +4 -2
  48. azure/storage/blob/aio/_container_client_async.py +26 -9
  49. azure/storage/blob/aio/_download_async.py +9 -9
  50. azure/storage/blob/aio/_encryption_async.py +72 -0
  51. azure/storage/blob/aio/_lease_async.py +1 -1
  52. azure/storage/blob/aio/_upload_helpers.py +8 -10
  53. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/METADATA +10 -10
  54. azure_storage_blob-12.20.0.dist-info/RECORD +81 -0
  55. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/WHEEL +1 -1
  56. azure/storage/blob/_generated/py.typed +0 -1
  57. azure_storage_blob-12.19.1.dist-info/RECORD +0 -81
  58. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/LICENSE +0 -0
  59. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/top_level.txt +0 -0
@@ -6,12 +6,12 @@
6
6
  # pylint: disable=invalid-overridden-method
7
7
 
8
8
  import asyncio
9
- import random
10
9
  import logging
11
- from typing import Any, TYPE_CHECKING
10
+ import random
11
+ from typing import Any, Dict, TYPE_CHECKING
12
12
 
13
- from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
14
13
  from azure.core.exceptions import AzureError
14
+ from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
15
15
 
16
16
  from .authentication import StorageHttpChallenge
17
17
  from .constants import DEFAULT_OAUTH_SCOPE
@@ -19,7 +19,10 @@ from .policies import is_retry, StorageRetryPolicy
19
19
 
20
20
  if TYPE_CHECKING:
21
21
  from azure.core.credentials_async import AsyncTokenCredential
22
- from azure.core.pipeline import PipelineRequest, PipelineResponse
22
+ from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import
23
+ PipelineRequest,
24
+ PipelineResponse
25
+ )
23
26
 
24
27
 
25
28
  _LOGGER = logging.getLogger(__name__)
@@ -45,8 +48,7 @@ class AsyncStorageResponseHook(AsyncHTTPPolicy):
45
48
  self._response_callback = kwargs.get('raw_response_hook')
46
49
  super(AsyncStorageResponseHook, self).__init__()
47
50
 
48
- async def send(self, request):
49
- # type: (PipelineRequest) -> PipelineResponse
51
+ async def send(self, request: "PipelineRequest") -> "PipelineResponse":
50
52
  # Values could be 0
51
53
  data_stream_total = request.context.get('data_stream_total')
52
54
  if data_stream_total is None:
@@ -80,12 +82,13 @@ class AsyncStorageResponseHook(AsyncHTTPPolicy):
80
82
  elif should_update_counts and upload_stream_current is not None:
81
83
  upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
82
84
  for pipeline_obj in [request, response]:
83
- pipeline_obj.context['data_stream_total'] = data_stream_total
84
- pipeline_obj.context['download_stream_current'] = download_stream_current
85
- pipeline_obj.context['upload_stream_current'] = upload_stream_current
85
+ if hasattr(pipeline_obj, 'context'):
86
+ pipeline_obj.context['data_stream_total'] = data_stream_total
87
+ pipeline_obj.context['download_stream_current'] = download_stream_current
88
+ pipeline_obj.context['upload_stream_current'] = upload_stream_current
86
89
  if response_callback:
87
90
  if asyncio.iscoroutine(response_callback):
88
- await response_callback(response)
91
+ await response_callback(response) # type: ignore
89
92
  else:
90
93
  response_callback(response)
91
94
  request.context['response_callback'] = response_callback
@@ -144,9 +147,23 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
144
147
  class ExponentialRetry(AsyncStorageRetryPolicy):
145
148
  """Exponential retry."""
146
149
 
147
- def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
148
- retry_to_secondary=False, random_jitter_range=3, **kwargs):
149
- '''
150
+ initial_backoff: int
151
+ """The initial backoff interval, in seconds, for the first retry."""
152
+ increment_base: int
153
+ """The base, in seconds, to increment the initial_backoff by after the
154
+ first retry."""
155
+ random_jitter_range: int
156
+ """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
157
+
158
+ def __init__(
159
+ self,
160
+ initial_backoff: int = 15,
161
+ increment_base: int = 3,
162
+ retry_total: int = 3,
163
+ retry_to_secondary: bool = False,
164
+ random_jitter_range: int = 3, **kwargs
165
+ ) -> None:
166
+ """
150
167
  Constructs an Exponential retry object. The initial_backoff is used for
151
168
  the first retry. Subsequent retries are retried after initial_backoff +
152
169
  increment_power^retry_count seconds. For example, by default the first retry
@@ -167,18 +184,18 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
167
184
  :param int random_jitter_range:
168
185
  A number in seconds which indicates a range to jitter/randomize for the back-off interval.
169
186
  For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
170
- '''
187
+ """
171
188
  self.initial_backoff = initial_backoff
172
189
  self.increment_base = increment_base
173
190
  self.random_jitter_range = random_jitter_range
174
191
  super(ExponentialRetry, self).__init__(
175
192
  retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
176
193
 
177
- def get_backoff_time(self, settings):
194
+ def get_backoff_time(self, settings: Dict[str, Any]) -> float:
178
195
  """
179
196
  Calculates how long to sleep before retrying.
180
197
 
181
- :param Optional[Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
198
+ :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
182
199
  :return:
183
200
  An integer indicating how long to wait before retrying the request,
184
201
  or None to indicate no retry should be performed.
@@ -194,7 +211,18 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
194
211
  class LinearRetry(AsyncStorageRetryPolicy):
195
212
  """Linear retry."""
196
213
 
197
- def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
214
+ initial_backoff: int
215
+ """The backoff interval, in seconds, between retries."""
216
+ random_jitter_range: int
217
+ """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
218
+
219
+ def __init__(
220
+ self, backoff: int = 15,
221
+ retry_total: int = 3,
222
+ retry_to_secondary: bool = False,
223
+ random_jitter_range: int = 3,
224
+ **kwargs: Any
225
+ ) -> None:
198
226
  """
199
227
  Constructs a Linear retry object.
200
228
 
@@ -215,11 +243,11 @@ class LinearRetry(AsyncStorageRetryPolicy):
215
243
  super(LinearRetry, self).__init__(
216
244
  retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
217
245
 
218
- def get_backoff_time(self, settings):
246
+ def get_backoff_time(self, settings: Dict[str, Any]) -> float:
219
247
  """
220
248
  Calculates how long to sleep before retrying.
221
249
 
222
- :param Optional[Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
250
+ :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
223
251
  :return:
224
252
  An integer indicating how long to wait before retrying the request,
225
253
  or None to indicate no retry should be performed.
@@ -240,8 +268,7 @@ class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
240
268
  def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None:
241
269
  super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)
242
270
 
243
- async def on_challenge(self, request, response):
244
- # type: (PipelineRequest, PipelineResponse) -> bool
271
+ async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool:
245
272
  try:
246
273
  auth_header = response.http_response.headers.get("WWW-Authenticate")
247
274
  challenge = StorageHttpChallenge(auth_header)
@@ -3,26 +3,22 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
- from typing import NoReturn, TYPE_CHECKING
7
6
  import logging
7
+ from typing import NoReturn
8
8
  from xml.etree.ElementTree import Element
9
9
 
10
- from azure.core.pipeline.policies import ContentDecodePolicy
11
10
  from azure.core.exceptions import (
11
+ ClientAuthenticationError,
12
+ DecodeError,
12
13
  HttpResponseError,
13
- ResourceNotFoundError,
14
- ResourceModifiedError,
15
14
  ResourceExistsError,
16
- ClientAuthenticationError,
17
- DecodeError)
15
+ ResourceModifiedError,
16
+ ResourceNotFoundError,
17
+ )
18
+ from azure.core.pipeline.policies import ContentDecodePolicy
18
19
 
19
- from .parser import _to_utc_datetime
20
20
  from .models import StorageErrorCode, UserDelegationKey, get_enum_value
21
-
22
-
23
- if TYPE_CHECKING:
24
- from datetime import datetime
25
- from azure.core.exceptions import AzureError
21
+ from .parser import _to_utc_datetime
26
22
 
27
23
 
28
24
  _LOGGER = logging.getLogger(__name__)
@@ -85,10 +81,10 @@ def return_raw_deserialized(response, *_):
85
81
  return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME]
86
82
 
87
83
 
88
- def process_storage_error(storage_error) -> NoReturn: # pylint:disable=too-many-statements
84
+ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements
89
85
  raise_error = HttpResponseError
90
86
  serialized = False
91
- if not storage_error.response:
87
+ if not storage_error.response or storage_error.response.status_code in [200, 204]:
92
88
  raise storage_error
93
89
  # If it is one of those three then it has been serialized prior by the generated layer.
94
90
  if isinstance(storage_error, (PartialBatchErrorException,
@@ -101,7 +97,8 @@ def process_storage_error(storage_error) -> NoReturn: # pylint:disable=too-many
101
97
  try:
102
98
  error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response)
103
99
  try:
104
- error_body = error_body or storage_error.response.reason
100
+ if error_body is None or len(error_body) == 0:
101
+ error_body = storage_error.response.reason
105
102
  except AttributeError:
106
103
  error_body = ''
107
104
  # If it is an XML response
@@ -119,7 +116,8 @@ def process_storage_error(storage_error) -> NoReturn: # pylint:disable=too-many
119
116
  error_dict = {'message': str(error_body)}
120
117
 
121
118
  # If we extracted from a Json or XML response
122
- if error_dict:
119
+ # There is a chance error_dict is just a string
120
+ if error_dict and isinstance(error_dict, dict):
123
121
  error_code = error_dict.get('code')
124
122
  error_message = error_dict.get('message')
125
123
  additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}}
@@ -3,6 +3,7 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
+ # pylint: disable=docstring-keyword-should-match-keyword-only
6
7
 
7
8
  from datetime import date
8
9
 
@@ -136,9 +137,8 @@ class SharedAccessSignature(object):
136
137
  :param start:
137
138
  The time at which the shared access signature becomes valid. If
138
139
  omitted, start time for this call is assumed to be the time when the
139
- storage service receives the request. Azure will always convert values
140
- to UTC. If a date is passed in without timezone info, it is assumed to
141
- be UTC.
140
+ storage service receives the request. The provided datetime will always
141
+ be interpreted as UTC.
142
142
  :type start: datetime or str
143
143
  :param str ip:
144
144
  Specifies an IP address or a range of IP addresses from which to accept requests.
@@ -3,10 +3,12 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
+ # pylint: disable=docstring-keyword-should-match-keyword-only
6
7
 
7
8
  from typing import ( # pylint: disable=unused-import
8
9
  Union, Optional, Any, TYPE_CHECKING
9
10
  )
11
+ from urllib.parse import parse_qs
10
12
 
11
13
  from ._shared import sign_string, url_quote
12
14
  from ._shared.constants import X_MS_VERSION
@@ -90,9 +92,8 @@ class BlobSharedAccessSignature(SharedAccessSignature):
90
92
  :param start:
91
93
  The time at which the shared access signature becomes valid. If
92
94
  omitted, start time for this call is assumed to be the time when the
93
- storage service receives the request. Azure will always convert values
94
- to UTC. If a date is passed in without timezone info, it is assumed to
95
- be UTC.
95
+ storage service receives the request. The provided datetime will always
96
+ be interpreted as UTC.
96
97
  :type start: datetime or str
97
98
  :param str policy_id:
98
99
  A unique value up to 64 characters in length that correlates to a
@@ -177,9 +178,8 @@ class BlobSharedAccessSignature(SharedAccessSignature):
177
178
  :param start:
178
179
  The time at which the shared access signature becomes valid. If
179
180
  omitted, start time for this call is assumed to be the time when the
180
- storage service receives the request. Azure will always convert values
181
- to UTC. If a date is passed in without timezone info, it is assumed to
182
- be UTC.
181
+ storage service receives the request. The provided datetime will always
182
+ be interpreted as UTC.
183
183
  :type start: datetime or str
184
184
  :param str policy_id:
185
185
  A unique value up to 64 characters in length that correlates to a
@@ -307,15 +307,17 @@ class _BlobSharedAccessHelper(_SharedAccessHelper):
307
307
 
308
308
 
309
309
  def generate_account_sas(
310
- account_name, # type: str
311
- account_key, # type: str
312
- resource_types, # type: Union[ResourceTypes, str]
313
- permission, # type: Union[AccountSasPermissions, str]
314
- expiry, # type: Union[datetime, str]
315
- start=None, # type: Optional[Union[datetime, str]]
316
- ip=None, # type: Optional[str]
317
- **kwargs # type: Any
318
- ): # type: (...) -> str
310
+ account_name: str,
311
+ account_key: str,
312
+ resource_types: Union["ResourceTypes", str],
313
+ permission: Union["AccountSasPermissions", str],
314
+ expiry: Union["datetime", str],
315
+ start: Optional[Union["datetime", str]] = None,
316
+ ip: Optional[str] = None,
317
+ *,
318
+ services: Union[Services, str] = Services(blob=True),
319
+ **kwargs: Any
320
+ ) -> str:
319
321
  """Generates a shared access signature for the blob service.
320
322
 
321
323
  Use the returned signature with the credential parameter of any BlobServiceClient,
@@ -334,15 +336,13 @@ def generate_account_sas(
334
336
  :type permission: str or ~azure.storage.blob.AccountSasPermissions
335
337
  :param expiry:
336
338
  The time at which the shared access signature becomes invalid.
337
- Azure will always convert values to UTC. If a date is passed in
338
- without timezone info, it is assumed to be UTC.
339
+ The provided datetime will always be interpreted as UTC.
339
340
  :type expiry: ~datetime.datetime or str
340
341
  :param start:
341
342
  The time at which the shared access signature becomes valid. If
342
343
  omitted, start time for this call is assumed to be the time when the
343
- storage service receives the request. Azure will always convert values
344
- to UTC. If a date is passed in without timezone info, it is assumed to
345
- be UTC.
344
+ storage service receives the request. The provided datetime will always
345
+ be interpreted as UTC.
346
346
  :type start: ~datetime.datetime or str
347
347
  :param str ip:
348
348
  Specifies an IP address or a range of IP addresses from which to accept requests.
@@ -350,6 +350,9 @@ def generate_account_sas(
350
350
  or address range specified on the SAS token, the request is not authenticated.
351
351
  For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS
352
352
  restricts the request to those IP addresses.
353
+ :keyword Union[Services, str] services:
354
+ Specifies the services that the Shared Access Signature (sas) token will be able to be utilized with.
355
+ Will default to only this package (i.e. blobs) if not provided.
353
356
  :keyword str protocol:
354
357
  Specifies the protocol permitted for a request made. The default value is https.
355
358
  :keyword str encryption_scope:
@@ -368,7 +371,7 @@ def generate_account_sas(
368
371
  """
369
372
  sas = SharedAccessSignature(account_name, account_key)
370
373
  return sas.generate_account(
371
- services=Services(blob=True),
374
+ services=services,
372
375
  resource_types=resource_types,
373
376
  permission=permission,
374
377
  expiry=expiry,
@@ -427,9 +430,8 @@ def generate_container_sas(
427
430
  :param start:
428
431
  The time at which the shared access signature becomes valid. If
429
432
  omitted, start time for this call is assumed to be the time when the
430
- storage service receives the request. Azure will always convert values
431
- to UTC. If a date is passed in without timezone info, it is assumed to
432
- be UTC.
433
+ storage service receives the request. The provided datetime will always
434
+ be interpreted as UTC.
433
435
  :type start: ~datetime.datetime or str
434
436
  :param str policy_id:
435
437
  A unique value up to 64 characters in length that correlates to a
@@ -554,9 +556,8 @@ def generate_blob_sas(
554
556
  :param start:
555
557
  The time at which the shared access signature becomes valid. If
556
558
  omitted, start time for this call is assumed to be the time when the
557
- storage service receives the request. Azure will always convert values
558
- to UTC. If a date is passed in without timezone info, it is assumed to
559
- be UTC.
559
+ storage service receives the request. The provided datetime will always
560
+ be interpreted as UTC.
560
561
  :type start: ~datetime.datetime or str
561
562
  :param str policy_id:
562
563
  A unique value up to 64 characters in length that correlates to a
@@ -629,3 +630,13 @@ def generate_blob_sas(
629
630
  ip=ip,
630
631
  **kwargs
631
632
  )
633
+
634
+ def _is_credential_sastoken(credential: Any) -> bool:
635
+ if not credential or not isinstance(credential, str):
636
+ return False
637
+
638
+ sas_values = QueryStringConstants.to_list()
639
+ parsed_query = parse_qs(credential.lstrip("?"))
640
+ if parsed_query and all(k in sas_values for k in parsed_query):
641
+ return True
642
+ return False
@@ -64,7 +64,6 @@ def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disab
64
64
 
65
65
  def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements
66
66
  client=None,
67
- data=None,
68
67
  stream=None,
69
68
  length=None,
70
69
  overwrite=None,
@@ -92,12 +91,10 @@ def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements
92
91
 
93
92
  # Do single put if the size is smaller than or equal config.max_single_put_size
94
93
  if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size):
95
- try:
96
- data = data.read(length)
97
- if not isinstance(data, bytes):
98
- raise TypeError('Blob data should be of type bytes.')
99
- except AttributeError:
100
- pass
94
+ data = stream.read(length)
95
+ if not isinstance(data, bytes):
96
+ raise TypeError('Blob data should be of type bytes.')
97
+
101
98
  if encryption_options.get('key'):
102
99
  encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version'])
103
100
  headers['x-ms-meta-encryptiondata'] = encryption_data
@@ -4,4 +4,4 @@
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
6
 
7
- VERSION = "12.19.1"
7
+ VERSION = "12.20.0"
@@ -3,6 +3,7 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
+ # pylint: disable=docstring-keyword-should-match-keyword-only
6
7
 
7
8
  import os
8
9
 
@@ -31,7 +32,7 @@ async def upload_blob_to_url(
31
32
  :param data:
32
33
  The data to upload. This can be bytes, text, an iterable or a file-like object.
33
34
  :type data: bytes or str or Iterable
34
- :param credential:
35
+ :param credential:
35
36
  The credentials with which to authenticate. This is optional if the
36
37
  blob URL already has a SAS token. The value can be a SAS token string,
37
38
  an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
@@ -40,7 +41,11 @@ async def upload_blob_to_url(
40
41
  - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
41
42
  If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
42
43
  should be the storage account key.
43
- :paramtype credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long
44
+ :type credential:
45
+ ~azure.core.credentials.AzureNamedKeyCredential or
46
+ ~azure.core.credentials.AzureSasCredential or
47
+ ~azure.core.credentials.TokenCredential or
48
+ str or dict[str, str] or None
44
49
  :keyword bool overwrite:
45
50
  Whether the blob to be uploaded should overwrite the current data.
46
51
  If True, upload_blob_to_url will overwrite any existing data. If set to False, the
@@ -64,7 +69,7 @@ async def upload_blob_to_url(
64
69
  :keyword str encoding:
65
70
  Encoding to use if text is supplied as input. Defaults to UTF-8.
66
71
  :returns: Blob-updated property dict (Etag and last modified)
67
- :rtype: dict(str, Any)
72
+ :rtype: dict[str, Any]
68
73
  """
69
74
  async with BlobClient.from_blob_url(blob_url, credential=credential) as client:
70
75
  return await client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
@@ -99,7 +104,11 @@ async def download_blob_from_url(
99
104
  - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
100
105
  If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
101
106
  should be the storage account key.
102
- :paramtype credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long
107
+ :type credential:
108
+ ~azure.core.credentials.AzureNamedKeyCredential or
109
+ ~azure.core.credentials.AzureSasCredential or
110
+ ~azure.core.credentials.TokenCredential or
111
+ str or dict[str, str] or None
103
112
  :keyword bool overwrite:
104
113
  Whether the local file should be overwritten if it already exists. The default value is
105
114
  `False` - in which case a ValueError will be raised if the file already exists. If set to
@@ -3,7 +3,7 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
- # pylint: disable=too-many-lines, invalid-overridden-method
6
+ # pylint: disable=too-many-lines, invalid-overridden-method, docstring-keyword-should-match-keyword-only
7
7
 
8
8
  import warnings
9
9
  from functools import partial
@@ -254,6 +254,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
254
254
  :keyword str source_authorization:
255
255
  Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
256
256
  the prefix of the source_authorization string.
257
+ :returns: Response from creating a new block blob for a given URL.
258
+ :rtype: Dict[str, Any]
257
259
  """
258
260
  options = self._upload_blob_from_url_options(
259
261
  source_url=self._encode_source_url(source_url),
@@ -894,6 +896,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
894
896
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
895
897
  #other-client--per-operation-configuration>`_.
896
898
  :returns: Blob-updated property dict (Etag and last modified)
899
+ :rtype: Dict[str, Union[str, datetime]]
897
900
  """
898
901
  options = self._set_blob_metadata_options(metadata=metadata, **kwargs)
899
902
  try:
@@ -1589,13 +1592,14 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
1589
1592
  length=None, # type: Optional[int]
1590
1593
  **kwargs
1591
1594
  ):
1592
- # type: (...) -> None
1595
+ # type: (...) -> Dict[str, Any]
1593
1596
  """Creates a new block to be committed as part of a blob.
1594
1597
 
1595
1598
  :param str block_id: A string value that identifies the block.
1596
1599
  The string should be less than or equal to 64 bytes in size.
1597
1600
  For a given blob, the block_id must be the same size for each block.
1598
1601
  :param data: The blob data.
1602
+ :type data: Union[Iterable[AnyStr], IO[AnyStr]]
1599
1603
  :param int length: Size of the block.
1600
1604
  :keyword bool validate_content:
1601
1605
  If true, calculates an MD5 hash for each chunk of the blob. The storage
@@ -1631,7 +1635,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
1631
1635
  This value is not tracked or validated on the client. To configure client-side network timeouts
1632
1636
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
1633
1637
  #other-client--per-operation-configuration>`_.
1634
- :rtype: None
1638
+ :returns: Blob property dict.
1639
+ :rtype: Dict[str, Any]
1635
1640
  """
1636
1641
  options = self._stage_block_options(
1637
1642
  block_id,
@@ -1652,7 +1657,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
1652
1657
  source_content_md5=None, # type: Optional[Union[bytes, bytearray]]
1653
1658
  **kwargs
1654
1659
  ):
1655
- # type: (...) -> None
1660
+ # type: (...) -> Dict[str, Any]
1656
1661
  """Creates a new block to be committed as part of a blob where
1657
1662
  the contents are read from a URL.
1658
1663
 
@@ -1693,7 +1698,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
1693
1698
  :keyword str source_authorization:
1694
1699
  Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
1695
1700
  the prefix of the source_authorization string.
1696
- :rtype: None
1701
+ :returns: Blob property dict.
1702
+ :rtype: Dict[str, Any]
1697
1703
  """
1698
1704
  options = self._stage_block_from_url_options(
1699
1705
  block_id,
@@ -2177,7 +2183,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
2177
2183
  .. versionadded:: 12.2.0
2178
2184
  This operation was introduced in API version '2019-07-07'.
2179
2185
 
2180
- :param previous_snapshot_url:
2186
+ :param str previous_snapshot_url:
2181
2187
  Specifies the URL of a previous snapshot of the managed disk.
2182
2188
  The response will only contain pages that were changed between the target blob and
2183
2189
  its previous snapshot.
@@ -2558,6 +2564,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
2558
2564
  :keyword str source_authorization:
2559
2565
  Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
2560
2566
  the prefix of the source_authorization string.
2567
+ :returns: Response after uploading pages from specified URL.
2568
+ :rtype: Dict[str, Any]
2561
2569
  """
2562
2570
 
2563
2571
  options = self._upload_pages_from_url_options(
@@ -2654,6 +2662,7 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
2654
2662
 
2655
2663
  :param data:
2656
2664
  Content of the block.
2665
+ :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]]
2657
2666
  :param int length:
2658
2667
  Size of the block in bytes.
2659
2668
  :keyword bool validate_content:
@@ -2833,6 +2842,8 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase, StorageEncryptio
2833
2842
  :keyword str source_authorization:
2834
2843
  Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
2835
2844
  the prefix of the source_authorization string.
2845
+ :returns: Result after appending a new block.
2846
+ :rtype: Dict[str, Union[str, datetime, int]]
2836
2847
  """
2837
2848
  options = self._append_block_from_url_options(
2838
2849
  copy_source_url=self._encode_source_url(copy_source_url),
@@ -3,7 +3,7 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
- # pylint: disable=invalid-overridden-method
6
+ # pylint: disable=invalid-overridden-method, docstring-keyword-should-match-keyword-only
7
7
 
8
8
  import functools
9
9
  import warnings
@@ -490,6 +490,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase, St
490
490
  This value is not tracked or validated on the client. To configure client-side network timeouts
491
491
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
492
492
  #other-client--per-operation-configuration>`_.
493
+ :returns: A container client to interact with the newly created container.
493
494
  :rtype: ~azure.storage.blob.aio.ContainerClient
494
495
 
495
496
  .. admonition:: Example:
@@ -527,7 +528,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase, St
527
528
  If specified, delete_container only succeeds if the
528
529
  container's lease is active and matches this ID.
529
530
  Required if the container has an active lease.
530
- :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
531
+ :type lease: ~azure.storage.blob.aio.BlobLeaseClient or str
531
532
  :keyword ~datetime.datetime if_modified_since:
532
533
  A DateTime value. Azure expects the date value passed in to be UTC.
533
534
  If timezone is included, any non-UTC datetimes will be converted to UTC.
@@ -591,6 +592,7 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase, St
591
592
  This value is not tracked or validated on the client. To configure client-side network timesouts
592
593
  see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
593
594
  #other-client--per-operation-configuration>`_.
595
+ :returns: A container client for the renamed container.
594
596
  :rtype: ~azure.storage.blob.ContainerClient
595
597
  """
596
598
  renamed_container = self.get_container_client(new_name)