azure-storage-blob 12.19.1__py3-none-any.whl → 12.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. azure/storage/blob/__init__.py +17 -5
  2. azure/storage/blob/_blob_client.py +23 -7
  3. azure/storage/blob/_blob_service_client.py +9 -3
  4. azure/storage/blob/_container_client.py +37 -13
  5. azure/storage/blob/_download.py +3 -3
  6. azure/storage/blob/_encryption.py +254 -165
  7. azure/storage/blob/_generated/_azure_blob_storage.py +21 -3
  8. azure/storage/blob/_generated/_configuration.py +4 -11
  9. azure/storage/blob/_generated/_serialization.py +41 -49
  10. azure/storage/blob/_generated/aio/_azure_blob_storage.py +23 -3
  11. azure/storage/blob/_generated/aio/_configuration.py +4 -11
  12. azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +24 -58
  13. azure/storage/blob/_generated/aio/operations/_blob_operations.py +123 -306
  14. azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +37 -86
  15. azure/storage/blob/_generated/aio/operations/_container_operations.py +98 -289
  16. azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +51 -150
  17. azure/storage/blob/_generated/aio/operations/_service_operations.py +49 -125
  18. azure/storage/blob/_generated/models/_models_py3.py +31 -31
  19. azure/storage/blob/_generated/operations/_append_blob_operations.py +25 -59
  20. azure/storage/blob/_generated/operations/_blob_operations.py +123 -306
  21. azure/storage/blob/_generated/operations/_block_blob_operations.py +39 -88
  22. azure/storage/blob/_generated/operations/_container_operations.py +100 -291
  23. azure/storage/blob/_generated/operations/_page_blob_operations.py +52 -151
  24. azure/storage/blob/_generated/operations/_service_operations.py +50 -126
  25. azure/storage/blob/_lease.py +1 -0
  26. azure/storage/blob/_models.py +3 -4
  27. azure/storage/blob/_serialize.py +1 -0
  28. azure/storage/blob/_shared/authentication.py +1 -1
  29. azure/storage/blob/_shared/avro/avro_io.py +0 -6
  30. azure/storage/blob/_shared/avro/avro_io_async.py +0 -6
  31. azure/storage/blob/_shared/avro/datafile.py +0 -4
  32. azure/storage/blob/_shared/avro/datafile_async.py +0 -4
  33. azure/storage/blob/_shared/avro/schema.py +4 -4
  34. azure/storage/blob/_shared/base_client.py +72 -87
  35. azure/storage/blob/_shared/base_client_async.py +115 -27
  36. azure/storage/blob/_shared/models.py +120 -27
  37. azure/storage/blob/_shared/parser.py +7 -6
  38. azure/storage/blob/_shared/policies.py +96 -66
  39. azure/storage/blob/_shared/policies_async.py +48 -21
  40. azure/storage/blob/_shared/response_handlers.py +14 -16
  41. azure/storage/blob/_shared/shared_access_signature.py +3 -3
  42. azure/storage/blob/_shared_access_signature.py +38 -27
  43. azure/storage/blob/_upload_helpers.py +4 -7
  44. azure/storage/blob/_version.py +1 -1
  45. azure/storage/blob/aio/__init__.py +13 -4
  46. azure/storage/blob/aio/_blob_client_async.py +17 -6
  47. azure/storage/blob/aio/_blob_service_client_async.py +4 -2
  48. azure/storage/blob/aio/_container_client_async.py +26 -9
  49. azure/storage/blob/aio/_download_async.py +9 -9
  50. azure/storage/blob/aio/_encryption_async.py +72 -0
  51. azure/storage/blob/aio/_lease_async.py +1 -1
  52. azure/storage/blob/aio/_upload_helpers.py +8 -10
  53. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/METADATA +10 -10
  54. azure_storage_blob-12.20.0.dist-info/RECORD +81 -0
  55. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/WHEEL +1 -1
  56. azure/storage/blob/_generated/py.typed +0 -1
  57. azure_storage_blob-12.19.1.dist-info/RECORD +0 -81
  58. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/LICENSE +0 -0
  59. {azure_storage_blob-12.19.1.dist-info → azure_storage_blob-12.20.0.dist-info}/top_level.txt +0 -0
@@ -5,8 +5,11 @@
 # --------------------------------------------------------------------------
 # pylint: disable=too-many-instance-attributes
 from enum import Enum
+from typing import Optional

 from azure.core import CaseInsensitiveEnumMeta
+from azure.core.configuration import Configuration
+from azure.core.pipeline.policies import UserAgentPolicy


 def get_enum_value(value):
@@ -269,7 +272,17 @@ class ResourceTypes(object):
         files(e.g. Put Blob, Query Entity, Get Messages, Create File, etc.)
     """

-    def __init__(self, service=False, container=False, object=False):  # pylint: disable=redefined-builtin
+    service: bool = False
+    container: bool = False
+    object: bool = False
+    _str: str
+
+    def __init__(
+        self,
+        service: bool = False,
+        container: bool = False,
+        object: bool = False  # pylint: disable=redefined-builtin
+    ) -> None:
         self.service = service
         self.container = container
         self.object = object
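
In 12.20.0 the constructor arguments gain typed class-level defaults (service, container, object, plus the private _str), so static type checkers see the attributes; runtime behavior is unchanged. A minimal usage sketch, using only the public constructor and str() conversion:

    from azure.storage.blob import ResourceTypes

    # 's' for service-level and 'o' for object-level APIs;
    # container-level APIs stay inaccessible.
    resource_types = ResourceTypes(service=True, object=True)
    print(str(resource_types))  # 'so'
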
@@ -344,9 +357,34 @@ class AccountSasPermissions(object):
         To enable permanent delete on the blob is permitted.
         Valid for Object resource type of Blob only.
     """
-    def __init__(self, read=False, write=False, delete=False,
-                 list=False,  # pylint: disable=redefined-builtin
-                 add=False, create=False, update=False, process=False, delete_previous_version=False, **kwargs):
+
+    read: bool = False
+    write: bool = False
+    delete: bool = False
+    delete_previous_version: bool = False
+    list: bool = False
+    add: bool = False
+    create: bool = False
+    update: bool = False
+    process: bool = False
+    tag: bool = False
+    filter_by_tags: bool = False
+    set_immutability_policy: bool = False
+    permanent_delete: bool = False
+
+    def __init__(
+        self,
+        read: bool = False,
+        write: bool = False,
+        delete: bool = False,
+        list: bool = False,  # pylint: disable=redefined-builtin
+        add: bool = False,
+        create: bool = False,
+        update: bool = False,
+        process: bool = False,
+        delete_previous_version: bool = False,
+        **kwargs
+    ) -> None:
         self.read = read
         self.write = write
         self.delete = delete
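
The same pattern applies: the thirteen permission flags become annotated class attributes while __init__ keeps its signature, with **kwargs still carrying tag, filter_by_tags, set_immutability_policy and permanent_delete. A hedged sketch of the typical consumer, generate_account_sas; the account name and key are placeholders:

    from datetime import datetime, timedelta, timezone
    from azure.storage.blob import (
        AccountSasPermissions,
        ResourceTypes,
        generate_account_sas,
    )

    sas_token = generate_account_sas(
        account_name="myaccount",        # placeholder
        account_key="account-key",       # placeholder
        resource_types=ResourceTypes(service=True, object=True),
        permission=AccountSasPermissions(read=True, list=True),
        expiry=datetime.now(timezone.utc) + timedelta(hours=1),
    )
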
@@ -415,15 +453,20 @@ class AccountSasPermissions(object):
 class Services(object):
     """Specifies the services accessible with the account SAS.

-    :param bool blob:
-        Access for the `~azure.storage.blob.BlobServiceClient`
-    :param bool queue:
-        Access for the `~azure.storage.queue.QueueServiceClient`
-    :param bool fileshare:
-        Access for the `~azure.storage.fileshare.ShareServiceClient`
+    :keyword bool blob:
+        Access for the `~azure.storage.blob.BlobServiceClient`. Default is False.
+    :keyword bool queue:
+        Access for the `~azure.storage.queue.QueueServiceClient`. Default is False.
+    :keyword bool fileshare:
+        Access for the `~azure.storage.fileshare.ShareServiceClient`. Default is False.
     """

-    def __init__(self, blob=False, queue=False, fileshare=False):
+    def __init__(
+        self, *,
+        blob: bool = False,
+        queue: bool = False,
+        fileshare: bool = False
+    ) -> None:
         self.blob = blob
         self.queue = queue
         self.fileshare = fileshare
@@ -451,7 +494,7 @@ class Services(object):
         res_queue = 'q' in string
         res_file = 'f' in string

-        parsed = cls(res_blob, res_queue, res_file)
+        parsed = cls(blob=res_blob, queue=res_queue, fileshare=res_file)
         parsed._str = string  # pylint: disable = protected-access
         return parsed

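Note the breaking edge here: Services.__init__ is now keyword-only (the bare *), so positional calls such as Services(True, True) raise TypeError, and from_string is updated to match. A small sketch against _shared/models.py, the internal module this hunk lives in:

    from azure.storage.blob._shared.models import Services

    services = Services(blob=True, queue=True)   # keyword-only in 12.20.0
    # Services(True, True)                       # TypeError under the new signature
    parsed = Services.from_string('bq')          # equivalent parsed form
    print(str(parsed))                           # 'bq'
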
@@ -463,22 +506,23 @@ class UserDelegationKey(object):

     The fields are saved as simple strings since the user does not have to interact with this object;
     to generate an identify SAS, the user can simply pass it to the right API.
-
-    :ivar str signed_oid:
-        Object ID of this token.
-    :ivar str signed_tid:
-        Tenant ID of the tenant that issued this token.
-    :ivar str signed_start:
-        The datetime this token becomes valid.
-    :ivar str signed_expiry:
-        The datetime this token expires.
-    :ivar str signed_service:
-        What service this key is valid for.
-    :ivar str signed_version:
-        The version identifier of the REST service that created this token.
-    :ivar str value:
-        The user delegation key.
     """
+
+    signed_oid: Optional[str] = None
+    """Object ID of this token."""
+    signed_tid: Optional[str] = None
+    """Tenant ID of the tenant that issued this token."""
+    signed_start: Optional[str] = None
+    """The datetime this token becomes valid."""
+    signed_expiry: Optional[str] = None
+    """The datetime this token expires."""
+    signed_service: Optional[str] = None
+    """What service this key is valid for."""
+    signed_version: Optional[str] = None
+    """The version identifier of the REST service that created this token."""
+    value: Optional[str] = None
+    """The user delegation key."""
+
     def __init__(self):
         self.signed_oid = None
         self.signed_tid = None
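
The :ivar: docstring block becomes Optional[str] class attributes with per-attribute docstrings; __init__ still sets every field to None, so behavior is identical. For context, a hedged sketch of how a key is obtained in practice, assuming azure-identity is installed and using a placeholder account URL:

    from datetime import datetime, timedelta, timezone
    from azure.identity import DefaultAzureCredential
    from azure.storage.blob import BlobServiceClient

    service = BlobServiceClient(
        "https://myaccount.blob.core.windows.net",   # placeholder
        credential=DefaultAzureCredential(),
    )
    now = datetime.now(timezone.utc)
    key = service.get_user_delegation_key(
        key_start_time=now, key_expiry_time=now + timedelta(hours=1))
    print(key.signed_oid, key.signed_expiry)         # now typed Optional[str]
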
@@ -487,3 +531,52 @@ class UserDelegationKey(object):
         self.signed_service = None
         self.signed_version = None
         self.value = None
+
+
+class StorageConfiguration(Configuration):
+    """
+    Specifies the configurable values used in Azure Storage.
+
+    :param int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be
+        uploaded with only one http PUT request. If the blob size is larger than max_single_put_size,
+        the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
+    :param int copy_polling_interval: The interval in seconds for polling copy operations.
+    :param int max_block_size: The maximum chunk size for uploading a block blob in chunks.
+        Defaults to 4*1024*1024, or 4MB.
+    :param int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
+        algorithm when uploading a block blob.
+    :param bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
+    :param int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
+    :param int min_large_chunk_upload_threshold: The max size for a single put operation.
+    :param int max_single_get_size: The maximum size for a blob to be downloaded in a single call,
+        the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB.
+    :param int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
+        or 4MB.
+    :param int max_range_size: The max range size for file upload.
+
+    """
+
+    max_single_put_size: int
+    copy_polling_interval: int
+    max_block_size: int
+    min_large_block_upload_threshold: int
+    use_byte_buffer: bool
+    max_page_size: int
+    min_large_chunk_upload_threshold: int
+    max_single_get_size: int
+    max_chunk_get_size: int
+    max_range_size: int
+    user_agent_policy: UserAgentPolicy
+
+    def __init__(self, **kwargs):
+        super(StorageConfiguration, self).__init__(**kwargs)
+        self.max_single_put_size = kwargs.pop('max_single_put_size', 64 * 1024 * 1024)
+        self.copy_polling_interval = 15
+        self.max_block_size = kwargs.pop('max_block_size', 4 * 1024 * 1024)
+        self.min_large_block_upload_threshold = kwargs.get('min_large_block_upload_threshold', 4 * 1024 * 1024 + 1)
+        self.use_byte_buffer = kwargs.pop('use_byte_buffer', False)
+        self.max_page_size = kwargs.pop('max_page_size', 4 * 1024 * 1024)
+        self.min_large_chunk_upload_threshold = kwargs.pop('min_large_chunk_upload_threshold', 100 * 1024 * 1024 + 1)
+        self.max_single_get_size = kwargs.pop('max_single_get_size', 32 * 1024 * 1024)
+        self.max_chunk_get_size = kwargs.pop('max_chunk_get_size', 4 * 1024 * 1024)
+        self.max_range_size = kwargs.pop('max_range_size', 4 * 1024 * 1024)
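
StorageConfiguration gathers up the tunables that base_client previously attached ad hoc to a plain azure.core Configuration (see the +72/-87 change to _shared/base_client.py in the file list). Each kwargs.pop default corresponds to a keyword argument the storage clients already accept, so the values can be overridden at construction time; a hedged sketch with a placeholder connection string:

    from azure.storage.blob import BlobClient

    blob = BlobClient.from_connection_string(
        "connection-string",                   # placeholder
        container_name="mycontainer",
        blob_name="large.bin",
        max_single_put_size=8 * 1024 * 1024,   # single PUT for blobs up to 8MB
        max_block_size=8 * 1024 * 1024,        # then upload in 8MB blocks
    )
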
@@ -6,6 +6,7 @@

 import sys
 from datetime import datetime, timezone
+from typing import Optional

 EPOCH_AS_FILETIME = 116444736000000000  # January 1, 1970 as MS filetime
 HUNDREDS_OF_NANOSECONDS = 10000000
@@ -20,10 +21,10 @@ else:
     _str = str


-def _to_utc_datetime(value):
+def _to_utc_datetime(value: datetime) -> str:
     return value.strftime('%Y-%m-%dT%H:%M:%SZ')

-def _rfc_1123_to_datetime(rfc_1123: str) -> datetime:
+def _rfc_1123_to_datetime(rfc_1123: str) -> Optional[datetime]:
     """Converts an RFC 1123 date string to a UTC datetime.

     :param str rfc_1123: The time and date in RFC 1123 format.
@@ -35,7 +36,7 @@ def _rfc_1123_to_datetime(rfc_1123: str) -> datetime:

     return datetime.strptime(rfc_1123, "%a, %d %b %Y %H:%M:%S %Z")

-def _filetime_to_datetime(filetime: str) -> datetime:
+def _filetime_to_datetime(filetime: str) -> Optional[datetime]:
     """Converts an MS filetime string to a UTC datetime. "0" indicates None.
     If parsing MS Filetime fails, tries RFC 1123 as backup.

@@ -48,11 +49,11 @@ def _filetime_to_datetime(filetime: str) -> datetime:

     # Try to convert to MS Filetime
     try:
-        filetime = int(filetime)
-        if filetime == 0:
+        temp_filetime = int(filetime)
+        if temp_filetime == 0:
             return None

-        return datetime.fromtimestamp((filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc)
+        return datetime.fromtimestamp((temp_filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc)
     except ValueError:
         pass

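The rename to temp_filetime keeps the filetime parameter at its declared str type; the arithmetic is untouched. As a worked check of the formula, one day past the Unix epoch (86400 seconds, i.e. 864000000000 hundred-nanosecond ticks) converts like this:

    from datetime import datetime, timezone

    EPOCH_AS_FILETIME = 116444736000000000   # January 1, 1970 as MS filetime
    HUNDREDS_OF_NANOSECONDS = 10000000

    filetime = "116445600000000000"          # epoch + one day
    dt = datetime.fromtimestamp(
        (int(filetime) - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS,
        tz=timezone.utc)
    print(dt)                                # 1970-01-02 00:00:00+00:00
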
@@ -6,29 +6,22 @@

 import base64
 import hashlib
-import re
-import random
-from time import time
-from io import SEEK_SET, UnsupportedOperation
 import logging
+import random
+import re
 import uuid
-from typing import Any, TYPE_CHECKING
-from wsgiref.handlers import format_date_time
-try:
-    from urllib.parse import (
-        urlparse,
+from io import SEEK_SET, UnsupportedOperation
+from time import time
+from typing import Any, Dict, Optional, TYPE_CHECKING
+from urllib.parse import (
     parse_qsl,
-        urlunparse,
     urlencode,
-    )
-except ImportError:
-    from urllib import urlencode  # type: ignore
-    from urlparse import (  # type: ignore
     urlparse,
-        parse_qsl,
     urlunparse,
-    )
+)
+from wsgiref.handlers import format_date_time

+from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError
 from azure.core.pipeline.policies import (
     BearerTokenCredentialPolicy,
     HeadersPolicy,
@@ -37,7 +30,6 @@ from azure.core.pipeline.policies import (
     RequestHistory,
     SansIOHTTPPolicy,
 )
-from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError

 from .authentication import StorageHttpChallenge
 from .constants import DEFAULT_OAUTH_SCOPE
@@ -50,7 +42,10 @@ except NameError:

 if TYPE_CHECKING:
     from azure.core.credentials import TokenCredential
-    from azure.core.pipeline import PipelineRequest, PipelineResponse
+    from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
+        PipelineRequest,
+        PipelineResponse
+    )


 _LOGGER = logging.getLogger(__name__)
@@ -128,8 +123,7 @@ class QueueMessagePolicy(SansIOHTTPPolicy):
 class StorageHeadersPolicy(HeadersPolicy):
     request_id_header_name = 'x-ms-client-request-id'

-    def on_request(self, request):
-        # type: (PipelineRequest, Any) -> None
+    def on_request(self, request: "PipelineRequest") -> None:
         super(StorageHeadersPolicy, self).on_request(request)
         current_time = format_date_time(time())
         request.http_request.headers['x-ms-date'] = current_time
@@ -159,8 +153,7 @@ class StorageHosts(SansIOHTTPPolicy):
         self.hosts = hosts
         super(StorageHosts, self).__init__()

-    def on_request(self, request):
-        # type: (PipelineRequest, Any) -> None
+    def on_request(self, request: "PipelineRequest") -> None:
         request.context.options['hosts'] = self.hosts
         parsed_url = urlparse(request.http_request.url)

@@ -191,12 +184,12 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):

     This accepts both global configuration, and per-request level with "enable_http_logger"
     """
-    def __init__(self, logging_enable=False, **kwargs):
+
+    def __init__(self, logging_enable: bool = False, **kwargs) -> None:
         self.logging_body = kwargs.pop("logging_body", False)
         super(StorageLoggingPolicy, self).__init__(logging_enable=logging_enable, **kwargs)

-    def on_request(self, request):
-        # type: (PipelineRequest, Any) -> None
+    def on_request(self, request: "PipelineRequest") -> None:
         http_request = request.http_request
         options = request.context.options
         self.logging_body = self.logging_body or options.pop("logging_body", False)
@@ -236,8 +229,7 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.debug("Failed to log request: %r", err)

-    def on_response(self, request, response):
-        # type: (PipelineRequest, PipelineResponse, Any) -> None
+    def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None:
         if response.context.pop("logging_enable", self.enable_http_logger):
             if not _LOGGER.isEnabledFor(logging.DEBUG):
                 return
@@ -280,8 +272,7 @@ class StorageRequestHook(SansIOHTTPPolicy):
         self._request_callback = kwargs.get('raw_request_hook')
         super(StorageRequestHook, self).__init__()

-    def on_request(self, request):
-        # type: (PipelineRequest, **Any) -> PipelineResponse
+    def on_request(self, request: "PipelineRequest") -> None:
         request_callback = request.context.options.pop('raw_request_hook', self._request_callback)
         if request_callback:
             request_callback(request)
@@ -293,8 +284,7 @@ class StorageResponseHook(HTTPPolicy):
         self._response_callback = kwargs.get('raw_response_hook')
         super(StorageResponseHook, self).__init__()

-    def send(self, request):
-        # type: (PipelineRequest) -> PipelineResponse
+    def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
         data_stream_total = request.context.get('data_stream_total')
         if data_stream_total is None:
@@ -327,9 +317,10 @@ class StorageResponseHook(HTTPPolicy):
         elif should_update_counts and upload_stream_current is not None:
             upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
         for pipeline_obj in [request, response]:
-            pipeline_obj.context['data_stream_total'] = data_stream_total
-            pipeline_obj.context['download_stream_current'] = download_stream_current
-            pipeline_obj.context['upload_stream_current'] = upload_stream_current
+            if hasattr(pipeline_obj, 'context'):
+                pipeline_obj.context['data_stream_total'] = data_stream_total
+                pipeline_obj.context['download_stream_current'] = download_stream_current
+                pipeline_obj.context['upload_stream_current'] = upload_stream_current
         if response_callback:
             response_callback(response)
         request.context['response_callback'] = response_callback
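
The new hasattr guard just avoids writing progress counters onto pipeline objects that lack a context. Those counters are what a raw_response_hook callback reads; a hedged sketch of wiring one up, with placeholder connection string and names:

    from azure.storage.blob import BlobClient

    def on_progress(response):
        # Keys populated by StorageResponseHook above.
        current = response.context.get('download_stream_current')
        total = response.context.get('data_stream_total')
        if current is not None:
            print(f"downloaded {current} of {total} bytes")

    blob = BlobClient.from_connection_string(
        "connection-string", container_name="mycontainer", blob_name="big.bin")
    data = blob.download_blob(raw_response_hook=on_progress).readall()
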
@@ -344,7 +335,7 @@ class StorageContentValidation(SansIOHTTPPolicy):
     """
     header_name = 'Content-MD5'

-    def __init__(self, **kwargs):  # pylint: disable=unused-argument
+    def __init__(self, **kwargs: Any) -> None:  # pylint: disable=unused-argument
         super(StorageContentValidation, self).__init__()

     @staticmethod
@@ -372,8 +363,7 @@ class StorageContentValidation(SansIOHTTPPolicy):

         return md5.digest()

-    def on_request(self, request):
-        # type: (PipelineRequest, Any) -> None
+    def on_request(self, request: "PipelineRequest") -> None:
         validate_content = request.context.options.pop('validate_content', False)
         if validate_content and request.http_request.method != 'GET':
             computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
@@ -381,7 +371,7 @@ class StorageContentValidation(SansIOHTTPPolicy):
             request.context['validate_content_md5'] = computed_md5
         request.context['validate_content'] = validate_content

-    def on_response(self, request, response):
+    def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None:
         if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
             computed_md5 = request.context.get('validate_content_md5') or \
                 encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
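
These two hooks are what validate_content=True turns on: on_request attaches a Content-MD5 header to non-GET requests, on_response recomputes and compares the digest. A hedged usage sketch, placeholders as before:

    from azure.storage.blob import BlobClient

    blob = BlobClient.from_connection_string(
        "connection-string", container_name="mycontainer", blob_name="data.bin")
    blob.upload_blob(b"payload", overwrite=True, validate_content=True)
    checked = blob.download_blob(validate_content=True).readall()
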
@@ -398,7 +388,18 @@ class StorageRetryPolicy(HTTPPolicy):
     The base class for Exponential and Linear retries containing shared code.
     """

-    def __init__(self, **kwargs):
+    total_retries: int
+    """The max number of retries."""
+    connect_retries: int
+    """The max number of connect retries."""
+    retry_read: int
+    """The max number of read retries."""
+    retry_status: int
+    """The max number of status retries."""
+    retry_to_secondary: bool
+    """Whether the secondary endpoint should be retried."""
+
+    def __init__(self, **kwargs: Any) -> None:
         self.total_retries = kwargs.pop('retry_total', 10)
         self.connect_retries = kwargs.pop('retry_connect', 3)
         self.read_retries = kwargs.pop('retry_read', 3)
@@ -406,11 +407,11 @@ class StorageRetryPolicy(HTTPPolicy):
         self.retry_to_secondary = kwargs.pop('retry_to_secondary', False)
         super(StorageRetryPolicy, self).__init__()

-    def _set_next_host_location(self, settings, request):
+    def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRequest") -> None:
         """
         A function which sets the next host location on the request, if applicable.

-        :param Optional[Dict[str, Any]] settings: The configurable values pertaining to the next host location.
+        :param Dict[str, Any]] settings: The configurable values pertaining to the next host location.
         :param PipelineRequest request: A pipeline request object.
         """
         if settings['hosts'] and all(settings['hosts'].values()):
@@ -423,7 +424,7 @@ class StorageRetryPolicy(HTTPPolicy):
             updated = url._replace(netloc=settings['hosts'].get(settings['mode']))
             request.url = updated.geturl()

-    def configure_retries(self, request):
+    def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]:
         body_position = None
         if hasattr(request.http_request.body, 'read'):
             try:
@@ -446,11 +447,11 @@ class StorageRetryPolicy(HTTPPolicy):
             'history': []
         }

-    def get_backoff_time(self, settings):  # pylint: disable=unused-argument
+    def get_backoff_time(self, settings: Dict[str, Any]) -> float:  # pylint: disable=unused-argument
         """ Formula for computing the current backoff.
         Should be calculated by child class.

-        :param Optional[Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
         :returns: The backoff time.
         :rtype: float
         """
@@ -462,15 +463,20 @@ class StorageRetryPolicy(HTTPPolicy):
             return
         transport.sleep(backoff)

-    def increment(self, settings, request, response=None, error=None):
+    def increment(
+        self, settings: Dict[str, Any],
+        request: "PipelineRequest",
+        response: Optional["PipelineResponse"] = None,
+        error: Optional[AzureError] = None
+    ) -> bool:
         """Increment the retry counters.

-        :param Optional[Dict[str, Any]] settings: The configurable values pertaining to the increment operation.
-        :param "PipelineRequest" request: A pipeline request object.
-        :param "PipelineResponse": A pipeline response object.
+        :param Dict[str, Any]] settings: The configurable values pertaining to the increment operation.
+        :param PipelineRequest request: A pipeline request object.
+        :param Optional[PipelineResponse] response: A pipeline response object.
         :param error: An error encountered during the request, or
             None if the response was received successfully.
-        :paramtype error: Union[ServiceRequestError, ServiceResponseError]
+        :type error: Optional[AzureError]
         :returns: Whether the retry attempts are exhausted.
         :rtype: bool
         """
@@ -556,9 +562,23 @@ class StorageRetryPolicy(HTTPPolicy):
 class ExponentialRetry(StorageRetryPolicy):
     """Exponential retry."""

-    def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
-                 retry_to_secondary=False, random_jitter_range=3, **kwargs):
-        '''
+    initial_backoff: int
+    """The initial backoff interval, in seconds, for the first retry."""
+    increment_base: int
+    """The base, in seconds, to increment the initial_backoff by after the
+    first retry."""
+    random_jitter_range: int
+    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
+
+    def __init__(
+        self, initial_backoff: int = 15,
+        increment_base: int = 3,
+        retry_total: int = 3,
+        retry_to_secondary: bool = False,
+        random_jitter_range: int = 3,
+        **kwargs: Any
+    ) -> None:
+        """
         Constructs an Exponential retry object. The initial_backoff is used for
         the first retry. Subsequent retries are retried after initial_backoff +
         increment_power^retry_count seconds.
@@ -568,7 +588,7 @@ class ExponentialRetry(StorageRetryPolicy):
         :param int increment_base:
             The base, in seconds, to increment the initial_backoff by after the
             first retry.
-        :param int max_attempts:
+        :param int retry_total:
             The maximum number of retry attempts.
         :param bool retry_to_secondary:
             Whether the request should be retried to secondary, if able. This should
@@ -577,22 +597,22 @@ class ExponentialRetry(StorageRetryPolicy):
         :param int random_jitter_range:
             A number in seconds which indicates a range to jitter/randomize for the back-off interval.
             For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
-        '''
+        """
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
         super(ExponentialRetry, self).__init__(
             retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

-    def get_backoff_time(self, settings):
+    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Optional[Dict[str, Any]] settings: The configurable values pertaining to get backoff time.
+        :param Dict[str, Any]] settings: The configurable values pertaining to get backoff time.
         :returns:
-            An integer indicating how long to wait before retrying the request,
+            A float indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
-        :rtype: int or None
+        :rtype: float
         """
         random_generator = random.Random()
         backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
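
So the schedule reads: 15 seconds before the first retry, then 15 + 3^count, each value jittered by +/-3 and clamped at zero. A standalone restatement of get_backoff_time under those defaults, useful for sanity-checking expected waits:

    import random

    def exponential_backoff(count, initial_backoff=15, increment_base=3, jitter=3):
        # Mirrors ExponentialRetry.get_backoff_time above.
        backoff = initial_backoff + (0 if count == 0 else pow(increment_base, count))
        start = backoff - jitter if backoff > jitter else 0
        return random.Random().uniform(start, backoff + jitter)

    # count=2 -> centered on 15 + 3**2 = 24 seconds, within [21, 27]
    print(exponential_backoff(2))
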
@@ -604,13 +624,24 @@ class ExponentialRetry(StorageRetryPolicy):
 class LinearRetry(StorageRetryPolicy):
     """Linear retry."""

-    def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
+    initial_backoff: int
+    """The backoff interval, in seconds, between retries."""
+    random_jitter_range: int
+    """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
+
+    def __init__(
+        self, backoff: int = 15,
+        retry_total: int = 3,
+        retry_to_secondary: bool = False,
+        random_jitter_range: int = 3,
+        **kwargs: Any
+    ) -> None:
         """
         Constructs a Linear retry object.

         :param int backoff:
             The backoff interval, in seconds, between retries.
-        :param int max_attempts:
+        :param int retry_total:
             The maximum number of retry attempts.
         :param bool retry_to_secondary:
             Whether the request should be retried to secondary, if able. This should
@@ -625,15 +656,15 @@ class LinearRetry(StorageRetryPolicy):
         super(LinearRetry, self).__init__(
             retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

-    def get_backoff_time(self, settings):
+    def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Optional[Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
         :returns:
-            An integer indicating how long to wait before retrying the request,
+            A float indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
-        :rtype: int or None
+        :rtype: float
         """
         random_generator = random.Random()
         # the backoff interval normally does not change, however there is the possibility
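
Both retry classes are exported from azure.storage.blob, and an instance can be passed to any storage client as retry_policy in place of the default ExponentialRetry; a hedged sketch with a placeholder connection string:

    from azure.storage.blob import BlobServiceClient, ExponentialRetry

    retry = ExponentialRetry(initial_backoff=10, increment_base=2, retry_total=5)
    service = BlobServiceClient.from_connection_string(
        "connection-string", retry_policy=retry)
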
@@ -650,8 +681,7 @@ class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy):
     def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) -> None:
         super(StorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)

-    def on_challenge(self, request, response):
-        # type: (PipelineRequest, PipelineResponse) -> bool
+    def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool:
         try:
             auth_header = response.http_response.headers.get("WWW-Authenticate")
             challenge = StorageHttpChallenge(auth_header)