azure-storage-blob 12.25.0b1__py3-none-any.whl → 12.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. azure/storage/blob/__init__.py +3 -2
  2. azure/storage/blob/_blob_client.py +94 -41
  3. azure/storage/blob/_blob_client_helpers.py +19 -4
  4. azure/storage/blob/_blob_service_client.py +16 -13
  5. azure/storage/blob/_container_client.py +25 -22
  6. azure/storage/blob/_deserialize.py +1 -1
  7. azure/storage/blob/_download.py +7 -7
  8. azure/storage/blob/_encryption.py +177 -184
  9. azure/storage/blob/_generated/_azure_blob_storage.py +1 -1
  10. azure/storage/blob/_generated/_configuration.py +2 -2
  11. azure/storage/blob/_generated/_serialization.py +3 -3
  12. azure/storage/blob/_generated/aio/_azure_blob_storage.py +1 -1
  13. azure/storage/blob/_generated/aio/_configuration.py +2 -2
  14. azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +5 -4
  15. azure/storage/blob/_generated/aio/operations/_blob_operations.py +5 -25
  16. azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -7
  17. azure/storage/blob/_generated/aio/operations/_container_operations.py +1 -19
  18. azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +5 -10
  19. azure/storage/blob/_generated/aio/operations/_service_operations.py +1 -8
  20. azure/storage/blob/_generated/models/__init__.py +2 -0
  21. azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +6 -0
  22. azure/storage/blob/_generated/operations/_append_blob_operations.py +12 -9
  23. azure/storage/blob/_generated/operations/_blob_operations.py +32 -49
  24. azure/storage/blob/_generated/operations/_block_blob_operations.py +21 -13
  25. azure/storage/blob/_generated/operations/_container_operations.py +19 -37
  26. azure/storage/blob/_generated/operations/_page_blob_operations.py +17 -19
  27. azure/storage/blob/_generated/operations/_service_operations.py +9 -17
  28. azure/storage/blob/_lease.py +1 -0
  29. azure/storage/blob/_quick_query_helper.py +20 -24
  30. azure/storage/blob/_serialize.py +1 -0
  31. azure/storage/blob/_shared/__init__.py +7 -7
  32. azure/storage/blob/_shared/authentication.py +49 -32
  33. azure/storage/blob/_shared/avro/avro_io.py +45 -43
  34. azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
  35. azure/storage/blob/_shared/avro/datafile.py +24 -21
  36. azure/storage/blob/_shared/avro/datafile_async.py +15 -15
  37. azure/storage/blob/_shared/avro/schema.py +196 -217
  38. azure/storage/blob/_shared/base_client.py +87 -61
  39. azure/storage/blob/_shared/base_client_async.py +58 -51
  40. azure/storage/blob/_shared/constants.py +1 -1
  41. azure/storage/blob/_shared/models.py +93 -92
  42. azure/storage/blob/_shared/parser.py +3 -3
  43. azure/storage/blob/_shared/policies.py +176 -145
  44. azure/storage/blob/_shared/policies_async.py +59 -70
  45. azure/storage/blob/_shared/request_handlers.py +51 -47
  46. azure/storage/blob/_shared/response_handlers.py +49 -45
  47. azure/storage/blob/_shared/shared_access_signature.py +67 -71
  48. azure/storage/blob/_shared/uploads.py +56 -49
  49. azure/storage/blob/_shared/uploads_async.py +72 -61
  50. azure/storage/blob/_shared_access_signature.py +3 -1
  51. azure/storage/blob/_version.py +1 -1
  52. azure/storage/blob/aio/__init__.py +3 -2
  53. azure/storage/blob/aio/_blob_client_async.py +241 -44
  54. azure/storage/blob/aio/_blob_service_client_async.py +13 -11
  55. azure/storage/blob/aio/_container_client_async.py +28 -25
  56. azure/storage/blob/aio/_download_async.py +16 -12
  57. azure/storage/blob/aio/_lease_async.py +1 -0
  58. azure/storage/blob/aio/_quick_query_helper_async.py +194 -0
  59. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/METADATA +7 -7
  60. azure_storage_blob-12.26.0.dist-info/RECORD +85 -0
  61. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/WHEEL +1 -1
  62. azure_storage_blob-12.25.0b1.dist-info/RECORD +0 -84
  63. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/LICENSE +0 -0
  64. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/top_level.txt +0 -0
azure/storage/blob/_shared/policies.py

@@ -28,7 +28,7 @@ from azure.core.pipeline.policies import (
     HTTPPolicy,
     NetworkTraceLoggingPolicy,
     RequestHistory,
-    SansIOHTTPPolicy
+    SansIOHTTPPolicy,
 )

 from .authentication import AzureSigningError, StorageHttpChallenge
@@ -39,7 +39,7 @@ if TYPE_CHECKING:
     from azure.core.credentials import TokenCredential
     from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
         PipelineRequest,
-        PipelineResponse
+        PipelineResponse,
     )

@@ -48,14 +48,14 @@ _LOGGER = logging.getLogger(__name__)

 def encode_base64(data):
     if isinstance(data, str):
-        data = data.encode('utf-8')
+        data = data.encode("utf-8")
     encoded = base64.b64encode(data)
-    return encoded.decode('utf-8')
+    return encoded.decode("utf-8")


 # Are we out of retries?
 def is_exhausted(settings):
-    retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status'])
+    retry_counts = (settings["total"], settings["connect"], settings["read"], settings["status"])
     retry_counts = list(filter(None, retry_counts))
     if not retry_counts:
         return False
@@ -63,8 +63,8 @@ def is_exhausted(settings):


 def retry_hook(settings, **kwargs):
-    if settings['hook']:
-        settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs)
+    if settings["hook"]:
+        settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)


 # Is this method/status code retryable? (Based on allowlists and control
@@ -95,40 +95,39 @@ def is_retry(response, mode):

 def is_checksum_retry(response):
     # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+    if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
+        computed_md5 = response.http_request.headers.get("content-md5", None) or encode_base64(
+            StorageContentValidation.get_content_md5(response.http_response.body())
+        )
+        if response.http_response.headers["content-md5"] != computed_md5:
             return True
     return False


 def urljoin(base_url, stub_url):
     parsed = urlparse(base_url)
-    parsed = parsed._replace(path=parsed.path + '/' + stub_url)
+    parsed = parsed._replace(path=parsed.path + "/" + stub_url)
     return parsed.geturl()


 class QueueMessagePolicy(SansIOHTTPPolicy):

     def on_request(self, request):
-        message_id = request.context.options.pop('queue_message_id', None)
+        message_id = request.context.options.pop("queue_message_id", None)
         if message_id:
-            request.http_request.url = urljoin(
-                request.http_request.url,
-                message_id)
+            request.http_request.url = urljoin(request.http_request.url, message_id)


 class StorageHeadersPolicy(HeadersPolicy):
-    request_id_header_name = 'x-ms-client-request-id'
+    request_id_header_name = "x-ms-client-request-id"

     def on_request(self, request: "PipelineRequest") -> None:
         super(StorageHeadersPolicy, self).on_request(request)
         current_time = format_date_time(time())
-        request.http_request.headers['x-ms-date'] = current_time
+        request.http_request.headers["x-ms-date"] = current_time

-        custom_id = request.context.options.pop('client_request_id', None)
-        request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1())
+        custom_id = request.context.options.pop("client_request_id", None)
+        request.http_request.headers["x-ms-client-request-id"] = custom_id or str(uuid.uuid1())

     # def on_response(self, request, response):
     #     # raise exception if the echoed client request id from the service is not identical to the one we sent
@@ -153,7 +152,7 @@ class StorageHosts(SansIOHTTPPolicy):
         super(StorageHosts, self).__init__()

     def on_request(self, request: "PipelineRequest") -> None:
-        request.context.options['hosts'] = self.hosts
+        request.context.options["hosts"] = self.hosts
         parsed_url = urlparse(request.http_request.url)

         # Detect what location mode we're currently requesting with
@@ -163,10 +162,10 @@ class StorageHosts(SansIOHTTPPolicy):
                 location_mode = key

         # See if a specific location mode has been specified, and if so, redirect
-        use_location = request.context.options.pop('use_location', None)
+        use_location = request.context.options.pop("use_location", None)
         if use_location:
             # Lock retries to the specific location
-            request.context.options['retry_to_secondary'] = False
+            request.context.options["retry_to_secondary"] = False
             if use_location not in self.hosts:
                 raise ValueError(f"Attempting to use undefined host location {use_location}")
             if use_location != location_mode:
@@ -175,7 +174,7 @@ class StorageHosts(SansIOHTTPPolicy):
                 request.http_request.url = updated.geturl()
                 location_mode = use_location

-        request.context.options['location_mode'] = location_mode
+        request.context.options["location_mode"] = location_mode


 class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
@@ -200,19 +199,19 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
             try:
                 log_url = http_request.url
                 query_params = http_request.query
-                if 'sig' in query_params:
-                    log_url = log_url.replace(query_params['sig'], "sig=*****")
+                if "sig" in query_params:
+                    log_url = log_url.replace(query_params["sig"], "sig=*****")
                 _LOGGER.debug("Request URL: %r", log_url)
                 _LOGGER.debug("Request method: %r", http_request.method)
                 _LOGGER.debug("Request headers:")
                 for header, value in http_request.headers.items():
-                    if header.lower() == 'authorization':
-                        value = '*****'
-                    elif header.lower() == 'x-ms-copy-source' and 'sig' in value:
+                    if header.lower() == "authorization":
+                        value = "*****"
+                    elif header.lower() == "x-ms-copy-source" and "sig" in value:
                         # take the url apart and scrub away the signed signature
                         scheme, netloc, path, params, query, fragment = urlparse(value)
                         parsed_qs = dict(parse_qsl(query))
-                        parsed_qs['sig'] = '*****'
+                        parsed_qs["sig"] = "*****"

                         # the SAS needs to be put back together
                         value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment))
@@ -242,11 +241,11 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
                 # We don't want to log binary data if the response is a file.
                 _LOGGER.debug("Response content:")
                 pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE)
-                header = response.http_response.headers.get('content-disposition')
+                header = response.http_response.headers.get("content-disposition")
                 resp_content_type = response.http_response.headers.get("content-type", "")

                 if header and pattern.match(header):
-                    filename = header.partition('=')[2]
+                    filename = header.partition("=")[2]
                     _LOGGER.debug("File attachments: %s", filename)
                 elif resp_content_type.endswith("octet-stream"):
                     _LOGGER.debug("Body contains binary data.")
@@ -268,11 +267,11 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
 class StorageRequestHook(SansIOHTTPPolicy):

     def __init__(self, **kwargs):
-        self._request_callback = kwargs.get('raw_request_hook')
+        self._request_callback = kwargs.get("raw_request_hook")
         super(StorageRequestHook, self).__init__()

     def on_request(self, request: "PipelineRequest") -> None:
-        request_callback = request.context.options.pop('raw_request_hook', self._request_callback)
+        request_callback = request.context.options.pop("raw_request_hook", self._request_callback)
         if request_callback:
             request_callback(request)

@@ -280,49 +279,50 @@ class StorageRequestHook(SansIOHTTPPolicy):
 class StorageResponseHook(HTTPPolicy):

     def __init__(self, **kwargs):
-        self._response_callback = kwargs.get('raw_response_hook')
+        self._response_callback = kwargs.get("raw_response_hook")
         super(StorageResponseHook, self).__init__()

     def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
-        data_stream_total = request.context.get('data_stream_total')
+        data_stream_total = request.context.get("data_stream_total")
         if data_stream_total is None:
-            data_stream_total = request.context.options.pop('data_stream_total', None)
-        download_stream_current = request.context.get('download_stream_current')
+            data_stream_total = request.context.options.pop("data_stream_total", None)
+        download_stream_current = request.context.get("download_stream_current")
         if download_stream_current is None:
-            download_stream_current = request.context.options.pop('download_stream_current', None)
-        upload_stream_current = request.context.get('upload_stream_current')
+            download_stream_current = request.context.options.pop("download_stream_current", None)
+        upload_stream_current = request.context.get("upload_stream_current")
         if upload_stream_current is None:
-            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+            upload_stream_current = request.context.options.pop("upload_stream_current", None)

-        response_callback = request.context.get('response_callback') or \
-            request.context.options.pop('raw_response_hook', self._response_callback)
+        response_callback = request.context.get("response_callback") or request.context.options.pop(
+            "raw_response_hook", self._response_callback
+        )

         response = self.next.send(request)

-        will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response)
+        will_retry = is_retry(response, request.context.options.get("mode")) or is_checksum_retry(response)
         # Auth error could come from Bearer challenge, in which case this request will be made again
         is_auth_error = response.http_response.status_code == 401
         should_update_counts = not (will_retry or is_auth_error)

         if should_update_counts and download_stream_current is not None:
-            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            download_stream_current += int(response.http_response.headers.get("Content-Length", 0))
             if data_stream_total is None:
-                content_range = response.http_response.headers.get('Content-Range')
+                content_range = response.http_response.headers.get("Content-Range")
                 if content_range:
-                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                    data_stream_total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
                 else:
                     data_stream_total = download_stream_current
         elif should_update_counts and upload_stream_current is not None:
-            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+            upload_stream_current += int(response.http_request.headers.get("Content-Length", 0))
         for pipeline_obj in [request, response]:
-            if hasattr(pipeline_obj, 'context'):
-                pipeline_obj.context['data_stream_total'] = data_stream_total
-                pipeline_obj.context['download_stream_current'] = download_stream_current
-                pipeline_obj.context['upload_stream_current'] = upload_stream_current
+            if hasattr(pipeline_obj, "context"):
+                pipeline_obj.context["data_stream_total"] = data_stream_total
+                pipeline_obj.context["download_stream_current"] = download_stream_current
+                pipeline_obj.context["upload_stream_current"] = upload_stream_current
         if response_callback:
             response_callback(response)
-            request.context['response_callback'] = response_callback
+            request.context["response_callback"] = response_callback
         return response

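The progress counters that StorageResponseHook stores on the pipeline context are surfaced through the raw_response_hook keyword, which the policy pops from the request options. A minimal sketch of observing upload progress, assuming an existing BlobClient named blob_client and a bytes payload named data (both hypothetical names):

    def report_progress(response):
        # Context keys populated by StorageResponseHook.send above.
        current = response.context.get("upload_stream_current")
        total = response.context.get("data_stream_total")
        if current is not None:
            print(f"Uploaded {current} of {total or 'unknown'} bytes")

    blob_client.upload_blob(data, overwrite=True, raw_response_hook=report_progress)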
@@ -332,7 +332,8 @@ class StorageContentValidation(SansIOHTTPPolicy):

     This will overwrite any headers already defined in the request.
     """
-    header_name = 'Content-MD5'
+
+    header_name = "Content-MD5"

     def __init__(self, **kwargs: Any) -> None:  # pylint: disable=unused-argument
         super(StorageContentValidation, self).__init__()
@@ -342,10 +343,10 @@ class StorageContentValidation(SansIOHTTPPolicy):
         # Since HTTP does not differentiate between no content and empty content,
         # we have to perform a None check.
         data = data or b""
-        md5 = hashlib.md5() # nosec
+        md5 = hashlib.md5()  # nosec
         if isinstance(data, bytes):
             md5.update(data)
-        elif hasattr(data, 'read'):
+        elif hasattr(data, "read"):
             pos = 0
             try:
                 pos = data.tell()
@@ -363,22 +364,25 @@ class StorageContentValidation(SansIOHTTPPolicy):
         return md5.digest()

     def on_request(self, request: "PipelineRequest") -> None:
-        validate_content = request.context.options.pop('validate_content', False)
-        if validate_content and request.http_request.method != 'GET':
+        validate_content = request.context.options.pop("validate_content", False)
+        if validate_content and request.http_request.method != "GET":
             computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
             request.http_request.headers[self.header_name] = computed_md5
-            request.context['validate_content_md5'] = computed_md5
-        request.context['validate_content'] = validate_content
+            request.context["validate_content_md5"] = computed_md5
+        request.context["validate_content"] = validate_content

     def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None:
-        if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-            computed_md5 = request.context.get('validate_content_md5') or \
-                encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-            if response.http_response.headers['content-md5'] != computed_md5:
-                raise AzureError((
-                    f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', "
-                    f"computed value is '{computed_md5}'."),
-                    response=response.http_response
+        if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
+            computed_md5 = request.context.get("validate_content_md5") or encode_base64(
+                StorageContentValidation.get_content_md5(response.http_response.body())
+            )
+            if response.http_response.headers["content-md5"] != computed_md5:
+                raise AzureError(
+                    (
+                        f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', "
+                        f"computed value is '{computed_md5}'."
+                    ),
+                    response=response.http_response,
                 )

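StorageContentValidation only takes effect when a caller opts in with the validate_content keyword. A minimal sketch, assuming an existing BlobClient named blob_client (a hypothetical name):

    # Upload: a Content-MD5 header is computed via StorageContentValidation.get_content_md5.
    blob_client.upload_blob(b"hello, world", overwrite=True, validate_content=True)

    # Download: the returned content-md5 header is verified; AzureError is raised on mismatch.
    data = blob_client.download_blob(validate_content=True).readall()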
@@ -399,33 +403,41 @@ class StorageRetryPolicy(HTTPPolicy):
     """Whether the secondary endpoint should be retried."""

     def __init__(self, **kwargs: Any) -> None:
-        self.total_retries = kwargs.pop('retry_total', 10)
-        self.connect_retries = kwargs.pop('retry_connect', 3)
-        self.read_retries = kwargs.pop('retry_read', 3)
-        self.status_retries = kwargs.pop('retry_status', 3)
-        self.retry_to_secondary = kwargs.pop('retry_to_secondary', False)
+        self.total_retries = kwargs.pop("retry_total", 10)
+        self.connect_retries = kwargs.pop("retry_connect", 3)
+        self.read_retries = kwargs.pop("retry_read", 3)
+        self.status_retries = kwargs.pop("retry_status", 3)
+        self.retry_to_secondary = kwargs.pop("retry_to_secondary", False)
         super(StorageRetryPolicy, self).__init__()

     def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRequest") -> None:
         """
         A function which sets the next host location on the request, if applicable.

-        :param Dict[str, Any]] settings: The configurable values pertaining to the next host location.
+        :param Dict[str, Any] settings: The configurable values pertaining to the next host location.
         :param PipelineRequest request: A pipeline request object.
         """
-        if settings['hosts'] and all(settings['hosts'].values()):
+        if settings["hosts"] and all(settings["hosts"].values()):
             url = urlparse(request.url)
             # If there's more than one possible location, retry to the alternative
-            if settings['mode'] == LocationMode.PRIMARY:
-                settings['mode'] = LocationMode.SECONDARY
+            if settings["mode"] == LocationMode.PRIMARY:
+                settings["mode"] = LocationMode.SECONDARY
             else:
-                settings['mode'] = LocationMode.PRIMARY
-            updated = url._replace(netloc=settings['hosts'].get(settings['mode']))
+                settings["mode"] = LocationMode.PRIMARY
+            updated = url._replace(netloc=settings["hosts"].get(settings["mode"]))
             request.url = updated.geturl()

     def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]:
+        """
+        Configure the retry settings for the request.
+
+        :param request: A pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: A dictionary containing the retry settings.
+        :rtype: Dict[str, Any]
+        """
         body_position = None
-        if hasattr(request.http_request.body, 'read'):
+        if hasattr(request.http_request.body, "read"):
             try:
                 body_position = request.http_request.body.tell()
             except (AttributeError, UnsupportedOperation):
@@ -433,129 +445,140 @@ class StorageRetryPolicy(HTTPPolicy):
                 pass
         options = request.context.options
         return {
-            'total': options.pop("retry_total", self.total_retries),
-            'connect': options.pop("retry_connect", self.connect_retries),
-            'read': options.pop("retry_read", self.read_retries),
-            'status': options.pop("retry_status", self.status_retries),
-            'retry_secondary': options.pop("retry_to_secondary", self.retry_to_secondary),
-            'mode': options.pop("location_mode", LocationMode.PRIMARY),
-            'hosts': options.pop("hosts", None),
-            'hook': options.pop("retry_hook", None),
-            'body_position': body_position,
-            'count': 0,
-            'history': []
+            "total": options.pop("retry_total", self.total_retries),
+            "connect": options.pop("retry_connect", self.connect_retries),
+            "read": options.pop("retry_read", self.read_retries),
+            "status": options.pop("retry_status", self.status_retries),
+            "retry_secondary": options.pop("retry_to_secondary", self.retry_to_secondary),
+            "mode": options.pop("location_mode", LocationMode.PRIMARY),
+            "hosts": options.pop("hosts", None),
+            "hook": options.pop("retry_hook", None),
+            "body_position": body_position,
+            "count": 0,
+            "history": [],
         }

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:  # pylint: disable=unused-argument
-        """ Formula for computing the current backoff.
+        """Formula for computing the current backoff.
         Should be calculated by child class.

         :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
-        :returns: The backoff time.
+        :return: The backoff time.
         :rtype: float
         """
         return 0

     def sleep(self, settings, transport):
+        """Sleep for the backoff time.
+
+        :param Dict[str, Any] settings: The configurable values pertaining to the sleep operation.
+        :param transport: The transport to use for sleeping.
+        :type transport:
+            ~azure.core.pipeline.transport.AsyncioBaseTransport or
+            ~azure.core.pipeline.transport.BaseTransport
+        """
         backoff = self.get_backoff_time(settings)
         if not backoff or backoff < 0:
             return
         transport.sleep(backoff)

     def increment(
-        self, settings: Dict[str, Any],
+        self,
+        settings: Dict[str, Any],
         request: "PipelineRequest",
         response: Optional["PipelineResponse"] = None,
-        error: Optional[AzureError] = None
+        error: Optional[AzureError] = None,
     ) -> bool:
         """Increment the retry counters.

         :param Dict[str, Any] settings: The configurable values pertaining to the increment operation.
-        :param PipelineRequest request: A pipeline request object.
-        :param Optional[PipelineResponse] response: A pipeline response object.
-        :param Optional[AzureError] error: An error encountered during the request, or
+        :param request: A pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: A pipeline response object.
+        :type response: ~azure.core.pipeline.PipelineResponse or None
+        :param error: An error encountered during the request, or
             None if the response was received successfully.
-        :returns: Whether the retry attempts are exhausted.
+        :type error: ~azure.core.exceptions.AzureError or None
+        :return: Whether the retry attempts are exhausted.
         :rtype: bool
         """
-        settings['total'] -= 1
+        settings["total"] -= 1

         if error and isinstance(error, ServiceRequestError):
             # Errors when we're fairly sure that the server did not receive the
             # request, so it should be safe to retry.
-            settings['connect'] -= 1
-            settings['history'].append(RequestHistory(request, error=error))
+            settings["connect"] -= 1
+            settings["history"].append(RequestHistory(request, error=error))

         elif error and isinstance(error, ServiceResponseError):
             # Errors that occur after the request has been started, so we should
             # assume that the server began processing it.
-            settings['read'] -= 1
-            settings['history'].append(RequestHistory(request, error=error))
+            settings["read"] -= 1
+            settings["history"].append(RequestHistory(request, error=error))

         else:
             # Incrementing because of a server error like a 500 in
             # status_forcelist and a the given method is in the allowlist
             if response:
-                settings['status'] -= 1
-                settings['history'].append(RequestHistory(request, http_response=response))
+                settings["status"] -= 1
+                settings["history"].append(RequestHistory(request, http_response=response))

         if not is_exhausted(settings):
-            if request.method not in ['PUT'] and settings['retry_secondary']:
+            if request.method not in ["PUT"] and settings["retry_secondary"]:
                 self._set_next_host_location(settings, request)

             # rewind the request body if it is a stream
-            if request.body and hasattr(request.body, 'read'):
+            if request.body and hasattr(request.body, "read"):
                 # no position was saved, then retry would not work
-                if settings['body_position'] is None:
+                if settings["body_position"] is None:
                     return False
                 try:
                     # attempt to rewind the body to the initial position
-                    request.body.seek(settings['body_position'], SEEK_SET)
+                    request.body.seek(settings["body_position"], SEEK_SET)
                 except (UnsupportedOperation, ValueError):
                     # if body is not seekable, then retry would not work
                     return False
-            settings['count'] += 1
+            settings["count"] += 1
             return True
         return False

     def send(self, request):
+        """Send the request with retry logic.
+
+        :param request: A pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: A pipeline response object.
+        :rtype: ~azure.core.pipeline.PipelineResponse
+        """
         retries_remaining = True
         response = None
         retry_settings = self.configure_retries(request)
         while retries_remaining:
             try:
                 response = self.next.send(request)
-                if is_retry(response, retry_settings['mode']) or is_checksum_retry(response):
+                if is_retry(response, retry_settings["mode"]) or is_checksum_retry(response):
                     retries_remaining = self.increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response)
+                        retry_settings, request=request.http_request, response=response.http_response
+                    )
                     if retries_remaining:
                         retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None)
+                            retry_settings, request=request.http_request, response=response.http_response, error=None
+                        )
                         self.sleep(retry_settings, request.context.transport)
                         continue
                 break
             except AzureError as err:
                 if isinstance(err, AzureSigningError):
                     raise
-                retries_remaining = self.increment(
-                    retry_settings, request=request.http_request, error=err)
+                retries_remaining = self.increment(retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
-                    retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err)
+                    retry_hook(retry_settings, request=request.http_request, response=None, error=err)
                     self.sleep(retry_settings, request.context.transport)
                     continue
                 raise err
-        if retry_settings['history']:
-            response.context['history'] = retry_settings['history']
-        response.http_response.location_mode = retry_settings['mode']
+        if retry_settings["history"]:
+            response.context["history"] = retry_settings["history"]
+        response.http_response.location_mode = retry_settings["mode"]
         return response

@@ -571,12 +594,13 @@ class ExponentialRetry(StorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""

     def __init__(
-        self, initial_backoff: int = 15,
+        self,
+        initial_backoff: int = 15,
         increment_base: int = 3,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         """
         Constructs an Exponential retry object. The initial_backoff is used for
@@ -601,21 +625,20 @@ class ExponentialRetry(StorageRetryPolicy):
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(ExponentialRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]] settings: The configurable values pertaining to get backoff time.
-        :returns:
+        :param Dict[str, Any] settings: The configurable values pertaining to get backoff time.
+        :return:
             A float indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: float
         """
         random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        backoff = self.initial_backoff + (0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]))
         random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
         random_range_end = backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)
@@ -630,11 +653,12 @@ class LinearRetry(StorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""

     def __init__(
-        self, backoff: int = 15,
+        self,
+        backoff: int = 15,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         """
         Constructs a Linear retry object.
@@ -653,15 +677,14 @@ class LinearRetry(StorageRetryPolicy):
         """
         self.backoff = backoff
         self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(LinearRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
-        :returns:
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
+        :return:
             A float indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: float
@@ -669,19 +692,27 @@ class LinearRetry(StorageRetryPolicy):
         random_generator = random.Random()
         # the backoff interval normally does not change, however there is the possibility
         # that it was modified by accessing the property directly after initializing the object
-        random_range_start = self.backoff - self.random_jitter_range \
-            if self.backoff > self.random_jitter_range else 0
+        random_range_start = self.backoff - self.random_jitter_range if self.backoff > self.random_jitter_range else 0
         random_range_end = self.backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)


 class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy):
-    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+    """Custom Bearer token credential policy for following Storage Bearer challenges"""

     def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) -> None:
         super(StorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)

     def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool:
+        """Handle the challenge from the service and authorize the request.
+
+        :param request: The request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: The response object.
+        :type response: ~azure.core.pipeline.PipelineResponse
+        :return: True if the request was authorized, False otherwise.
+        :rtype: bool
+        """
         try:
             auth_header = response.http_response.headers.get("WWW-Authenticate")
             challenge = StorageHttpChallenge(auth_header)
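For context on the retry classes above, the values consumed by StorageRetryPolicy.configure_retries (retry_total, retry_connect, retry_read, retry_status, retry_to_secondary) are typically supplied as keywords when a client is constructed, and a preconfigured ExponentialRetry or LinearRetry can usually be passed as retry_policy. A minimal sketch, assuming azure-storage-blob 12.26.0 and a connection string held in conn_str (a hypothetical name):

    from azure.storage.blob import BlobServiceClient, ExponentialRetry

    service = BlobServiceClient.from_connection_string(
        conn_str,
        retry_total=5,            # caps the overall retry count
        retry_to_secondary=True,  # allow failover to the secondary endpoint on retry
    )

    # Alternatively, hand in a preconfigured policy instance:
    service = BlobServiceClient.from_connection_string(
        conn_str, retry_policy=ExponentialRetry(initial_backoff=10, increment_base=2)
    )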