azure-storage-blob 12.26.0b1__py3-none-any.whl → 12.27.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. azure/storage/blob/__init__.py +3 -2
  2. azure/storage/blob/_blob_client.py +43 -38
  3. azure/storage/blob/_blob_client_helpers.py +4 -3
  4. azure/storage/blob/_blob_service_client.py +16 -13
  5. azure/storage/blob/_container_client.py +25 -22
  6. azure/storage/blob/_deserialize.py +1 -1
  7. azure/storage/blob/_download.py +7 -7
  8. azure/storage/blob/_encryption.py +177 -184
  9. azure/storage/blob/_generated/_azure_blob_storage.py +3 -2
  10. azure/storage/blob/_generated/_configuration.py +2 -2
  11. azure/storage/blob/_generated/_utils/__init__.py +6 -0
  12. azure/storage/blob/_generated/{_serialization.py → _utils/serialization.py} +4 -22
  13. azure/storage/blob/_generated/aio/_azure_blob_storage.py +3 -2
  14. azure/storage/blob/_generated/aio/_configuration.py +2 -2
  15. azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +6 -10
  16. azure/storage/blob/_generated/aio/operations/_blob_operations.py +35 -39
  17. azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -13
  18. azure/storage/blob/_generated/aio/operations/_container_operations.py +20 -24
  19. azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +13 -17
  20. azure/storage/blob/_generated/aio/operations/_service_operations.py +10 -14
  21. azure/storage/blob/_generated/models/_models_py3.py +30 -9
  22. azure/storage/blob/_generated/operations/_append_blob_operations.py +11 -15
  23. azure/storage/blob/_generated/operations/_blob_operations.py +60 -64
  24. azure/storage/blob/_generated/operations/_block_blob_operations.py +16 -20
  25. azure/storage/blob/_generated/operations/_container_operations.py +39 -43
  26. azure/storage/blob/_generated/operations/_page_blob_operations.py +23 -27
  27. azure/storage/blob/_generated/operations/_service_operations.py +19 -23
  28. azure/storage/blob/_lease.py +1 -0
  29. azure/storage/blob/_list_blobs_helper.py +1 -1
  30. azure/storage/blob/_quick_query_helper.py +3 -3
  31. azure/storage/blob/_serialize.py +1 -0
  32. azure/storage/blob/_shared/__init__.py +7 -7
  33. azure/storage/blob/_shared/authentication.py +49 -32
  34. azure/storage/blob/_shared/avro/avro_io.py +44 -42
  35. azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
  36. azure/storage/blob/_shared/avro/datafile.py +24 -21
  37. azure/storage/blob/_shared/avro/datafile_async.py +15 -15
  38. azure/storage/blob/_shared/avro/schema.py +196 -217
  39. azure/storage/blob/_shared/base_client.py +81 -59
  40. azure/storage/blob/_shared/base_client_async.py +58 -51
  41. azure/storage/blob/_shared/constants.py +1 -1
  42. azure/storage/blob/_shared/models.py +94 -92
  43. azure/storage/blob/_shared/parser.py +3 -3
  44. azure/storage/blob/_shared/policies.py +186 -147
  45. azure/storage/blob/_shared/policies_async.py +53 -65
  46. azure/storage/blob/_shared/request_handlers.py +50 -45
  47. azure/storage/blob/_shared/response_handlers.py +54 -45
  48. azure/storage/blob/_shared/shared_access_signature.py +65 -73
  49. azure/storage/blob/_shared/uploads.py +56 -49
  50. azure/storage/blob/_shared/uploads_async.py +70 -58
  51. azure/storage/blob/_version.py +1 -1
  52. azure/storage/blob/aio/__init__.py +3 -2
  53. azure/storage/blob/aio/_blob_client_async.py +53 -40
  54. azure/storage/blob/aio/_blob_service_client_async.py +13 -11
  55. azure/storage/blob/aio/_container_client_async.py +28 -25
  56. azure/storage/blob/aio/_download_async.py +7 -7
  57. azure/storage/blob/aio/_lease_async.py +1 -0
  58. azure/storage/blob/aio/_quick_query_helper_async.py +3 -3
  59. {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/METADATA +3 -4
  60. azure_storage_blob-12.27.0b1.dist-info/RECORD +86 -0
  61. azure_storage_blob-12.26.0b1.dist-info/RECORD +0 -85
  62. {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/LICENSE +0 -0
  63. {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/WHEEL +0 -0
  64. {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/top_level.txt +0 -0
@@ -28,18 +28,18 @@ from azure.core.pipeline.policies import (
     HTTPPolicy,
     NetworkTraceLoggingPolicy,
     RequestHistory,
-    SansIOHTTPPolicy
+    SansIOHTTPPolicy,
 )

 from .authentication import AzureSigningError, StorageHttpChallenge
 from .constants import DEFAULT_OAUTH_SCOPE
-from .models import LocationMode
+from .models import LocationMode, StorageErrorCode

 if TYPE_CHECKING:
     from azure.core.credentials import TokenCredential
     from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
         PipelineRequest,
-        PipelineResponse
+        PipelineResponse,
     )


@@ -48,14 +48,14 @@ _LOGGER = logging.getLogger(__name__)

 def encode_base64(data):
     if isinstance(data, str):
-        data = data.encode('utf-8')
+        data = data.encode("utf-8")
     encoded = base64.b64encode(data)
-    return encoded.decode('utf-8')
+    return encoded.decode("utf-8")


 # Are we out of retries?
 def is_exhausted(settings):
-    retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status'])
+    retry_counts = (settings["total"], settings["connect"], settings["read"], settings["status"])
     retry_counts = list(filter(None, retry_counts))
     if not retry_counts:
         return False
@@ -63,8 +63,8 @@ def is_exhausted(settings):


 def retry_hook(settings, **kwargs):
-    if settings['hook']:
-        settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs)
+    if settings["hook"]:
+        settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)


 # Is this method/status code retryable? (Based on allowlists and control
@@ -72,7 +72,7 @@ def retry_hook(settings, **kwargs):
 # respect the Retry-After header, whether this header is present, and
 # whether the returned status code is on the list of status codes to
 # be retried upon on the presence of the aforementioned header)
-def is_retry(response, mode):
+def is_retry(response, mode):  # pylint: disable=too-many-return-statements
     status = response.http_response.status_code
     if 300 <= status < 500:
         # An exception occurred, but in most cases it was expected. Examples could
@@ -83,6 +83,14 @@ def is_retry(response, mode):
         if status == 408:
             # Response code 408 is a timeout and should be retried.
             return True
+        if status >= 400:
+            error_code = response.http_response.headers.get("x-ms-copy-source-error-code")
+            if error_code in [
+                StorageErrorCode.OPERATION_TIMED_OUT,
+                StorageErrorCode.INTERNAL_ERROR,
+                StorageErrorCode.SERVER_BUSY,
+            ]:
+                return True
         return False
     if status >= 500:
         # Response codes above 500 with the exception of 501 Not Implemented and
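Note: the added branch above makes copy requests retryable when the copy *source* reports a transient failure via the x-ms-copy-source-error-code header. Below is a minimal, self-contained sketch of that decision; the helper name is hypothetical and the literal strings are assumed stand-ins for the StorageErrorCode members the real policy compares against.

# Hypothetical standalone mirror of the new check; not SDK code.
TRANSIENT_COPY_SOURCE_ERRORS = {"OperationTimedOut", "InternalError", "ServerBusy"}  # assumed service values

def should_retry_copy_source(status_code: int, headers: dict) -> bool:
    if 300 <= status_code < 500:
        if status_code == 408:
            # 408 is a timeout and is always retryable.
            return True
        if status_code >= 400:
            # Retry only when the copy source reported a transient error.
            return headers.get("x-ms-copy-source-error-code") in TRANSIENT_COPY_SOURCE_ERRORS
    return False

# A 409 whose copy source was busy is now treated as retryable:
assert should_retry_copy_source(409, {"x-ms-copy-source-error-code": "ServerBusy"})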
@@ -95,40 +103,39 @@ def is_retry(response, mode):

 def is_checksum_retry(response):
     # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+    if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
+        computed_md5 = response.http_request.headers.get("content-md5", None) or encode_base64(
+            StorageContentValidation.get_content_md5(response.http_response.body())
+        )
+        if response.http_response.headers["content-md5"] != computed_md5:
             return True
     return False


 def urljoin(base_url, stub_url):
     parsed = urlparse(base_url)
-    parsed = parsed._replace(path=parsed.path + '/' + stub_url)
+    parsed = parsed._replace(path=parsed.path + "/" + stub_url)
     return parsed.geturl()


 class QueueMessagePolicy(SansIOHTTPPolicy):

     def on_request(self, request):
-        message_id = request.context.options.pop('queue_message_id', None)
+        message_id = request.context.options.pop("queue_message_id", None)
         if message_id:
-            request.http_request.url = urljoin(
-                request.http_request.url,
-                message_id)
+            request.http_request.url = urljoin(request.http_request.url, message_id)


 class StorageHeadersPolicy(HeadersPolicy):
-    request_id_header_name = 'x-ms-client-request-id'
+    request_id_header_name = "x-ms-client-request-id"

     def on_request(self, request: "PipelineRequest") -> None:
         super(StorageHeadersPolicy, self).on_request(request)
         current_time = format_date_time(time())
-        request.http_request.headers['x-ms-date'] = current_time
+        request.http_request.headers["x-ms-date"] = current_time

-        custom_id = request.context.options.pop('client_request_id', None)
-        request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1())
+        custom_id = request.context.options.pop("client_request_id", None)
+        request.http_request.headers["x-ms-client-request-id"] = custom_id or str(uuid.uuid1())

     # def on_response(self, request, response):
     #     # raise exception if the echoed client request id from the service is not identical to the one we sent
@@ -153,7 +160,7 @@ class StorageHosts(SansIOHTTPPolicy):
         super(StorageHosts, self).__init__()

     def on_request(self, request: "PipelineRequest") -> None:
-        request.context.options['hosts'] = self.hosts
+        request.context.options["hosts"] = self.hosts
         parsed_url = urlparse(request.http_request.url)

         # Detect what location mode we're currently requesting with
@@ -163,10 +170,10 @@ class StorageHosts(SansIOHTTPPolicy):
                 location_mode = key

         # See if a specific location mode has been specified, and if so, redirect
-        use_location = request.context.options.pop('use_location', None)
+        use_location = request.context.options.pop("use_location", None)
         if use_location:
             # Lock retries to the specific location
-            request.context.options['retry_to_secondary'] = False
+            request.context.options["retry_to_secondary"] = False
             if use_location not in self.hosts:
                 raise ValueError(f"Attempting to use undefined host location {use_location}")
             if use_location != location_mode:
@@ -175,7 +182,7 @@ class StorageHosts(SansIOHTTPPolicy):
                 request.http_request.url = updated.geturl()
                 location_mode = use_location

-        request.context.options['location_mode'] = location_mode
+        request.context.options["location_mode"] = location_mode


 class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
@@ -200,19 +207,19 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
             try:
                 log_url = http_request.url
                 query_params = http_request.query
-                if 'sig' in query_params:
-                    log_url = log_url.replace(query_params['sig'], "sig=*****")
+                if "sig" in query_params:
+                    log_url = log_url.replace(query_params["sig"], "sig=*****")
                 _LOGGER.debug("Request URL: %r", log_url)
                 _LOGGER.debug("Request method: %r", http_request.method)
                 _LOGGER.debug("Request headers:")
                 for header, value in http_request.headers.items():
-                    if header.lower() == 'authorization':
-                        value = '*****'
-                    elif header.lower() == 'x-ms-copy-source' and 'sig' in value:
+                    if header.lower() == "authorization":
+                        value = "*****"
+                    elif header.lower() == "x-ms-copy-source" and "sig" in value:
                         # take the url apart and scrub away the signed signature
                         scheme, netloc, path, params, query, fragment = urlparse(value)
                         parsed_qs = dict(parse_qsl(query))
-                        parsed_qs['sig'] = '*****'
+                        parsed_qs["sig"] = "*****"

                         # the SAS needs to be put back together
                         value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment))
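Note: the logging policy above redacts SAS signatures before anything reaches the DEBUG logger. A standalone sketch of the same scrubbing, using only the standard library (the function name is illustrative, not part of the SDK):

from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

def scrub_sas_signature(url: str) -> str:
    # Take the URL apart, replace the signed signature, and put it back together.
    scheme, netloc, path, params, query, fragment = urlparse(url)
    parsed_qs = dict(parse_qsl(query))
    if "sig" in parsed_qs:
        parsed_qs["sig"] = "*****"
    return urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment))

print(scrub_sas_signature("https://account.blob.core.windows.net/c/b?sv=2025-01-05&sig=SECRET"))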
@@ -242,11 +249,11 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
                 # We don't want to log binary data if the response is a file.
                 _LOGGER.debug("Response content:")
                 pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE)
-                header = response.http_response.headers.get('content-disposition')
+                header = response.http_response.headers.get("content-disposition")
                 resp_content_type = response.http_response.headers.get("content-type", "")

                 if header and pattern.match(header):
-                    filename = header.partition('=')[2]
+                    filename = header.partition("=")[2]
                     _LOGGER.debug("File attachments: %s", filename)
                 elif resp_content_type.endswith("octet-stream"):
                     _LOGGER.debug("Body contains binary data.")
@@ -268,11 +275,11 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
 class StorageRequestHook(SansIOHTTPPolicy):

     def __init__(self, **kwargs):
-        self._request_callback = kwargs.get('raw_request_hook')
+        self._request_callback = kwargs.get("raw_request_hook")
         super(StorageRequestHook, self).__init__()

     def on_request(self, request: "PipelineRequest") -> None:
-        request_callback = request.context.options.pop('raw_request_hook', self._request_callback)
+        request_callback = request.context.options.pop("raw_request_hook", self._request_callback)
         if request_callback:
             request_callback(request)

@@ -280,49 +287,50 @@ class StorageRequestHook(SansIOHTTPPolicy):
 class StorageResponseHook(HTTPPolicy):

     def __init__(self, **kwargs):
-        self._response_callback = kwargs.get('raw_response_hook')
+        self._response_callback = kwargs.get("raw_response_hook")
         super(StorageResponseHook, self).__init__()

     def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
-        data_stream_total = request.context.get('data_stream_total')
+        data_stream_total = request.context.get("data_stream_total")
         if data_stream_total is None:
-            data_stream_total = request.context.options.pop('data_stream_total', None)
-        download_stream_current = request.context.get('download_stream_current')
+            data_stream_total = request.context.options.pop("data_stream_total", None)
+        download_stream_current = request.context.get("download_stream_current")
         if download_stream_current is None:
-            download_stream_current = request.context.options.pop('download_stream_current', None)
-        upload_stream_current = request.context.get('upload_stream_current')
+            download_stream_current = request.context.options.pop("download_stream_current", None)
+        upload_stream_current = request.context.get("upload_stream_current")
         if upload_stream_current is None:
-            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+            upload_stream_current = request.context.options.pop("upload_stream_current", None)

-        response_callback = request.context.get('response_callback') or \
-            request.context.options.pop('raw_response_hook', self._response_callback)
+        response_callback = request.context.get("response_callback") or request.context.options.pop(
+            "raw_response_hook", self._response_callback
+        )

         response = self.next.send(request)

-        will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response)
+        will_retry = is_retry(response, request.context.options.get("mode")) or is_checksum_retry(response)
         # Auth error could come from Bearer challenge, in which case this request will be made again
         is_auth_error = response.http_response.status_code == 401
         should_update_counts = not (will_retry or is_auth_error)

         if should_update_counts and download_stream_current is not None:
-            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            download_stream_current += int(response.http_response.headers.get("Content-Length", 0))
             if data_stream_total is None:
-                content_range = response.http_response.headers.get('Content-Range')
+                content_range = response.http_response.headers.get("Content-Range")
                 if content_range:
-                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                    data_stream_total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
                 else:
                     data_stream_total = download_stream_current
         elif should_update_counts and upload_stream_current is not None:
-            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+            upload_stream_current += int(response.http_request.headers.get("Content-Length", 0))
         for pipeline_obj in [request, response]:
-            if hasattr(pipeline_obj, 'context'):
-                pipeline_obj.context['data_stream_total'] = data_stream_total
-                pipeline_obj.context['download_stream_current'] = download_stream_current
-                pipeline_obj.context['upload_stream_current'] = upload_stream_current
+            if hasattr(pipeline_obj, "context"):
+                pipeline_obj.context["data_stream_total"] = data_stream_total
+                pipeline_obj.context["download_stream_current"] = download_stream_current
+                pipeline_obj.context["upload_stream_current"] = upload_stream_current
         if response_callback:
             response_callback(response)
-            request.context['response_callback'] = response_callback
+            request.context["response_callback"] = response_callback
         return response


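Note: the response hook keeps download/upload progress counters in the pipeline context; when the total size is not yet known it derives it from the Content-Range header, as in the hunk above. A tiny illustration of that parsing (the helper name is hypothetical):

def total_from_content_range(content_range: str) -> int:
    # "bytes 0-1023/4096" -> drop the unit, then take the total size after "/"
    return int(content_range.split(" ", 1)[1].split("/", 1)[1])

assert total_from_content_range("bytes 0-1023/4096") == 4096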
@@ -332,7 +340,8 @@ class StorageContentValidation(SansIOHTTPPolicy):

     This will overwrite any headers already defined in the request.
     """
-    header_name = 'Content-MD5'
+
+    header_name = "Content-MD5"

     def __init__(self, **kwargs: Any) -> None:  # pylint: disable=unused-argument
         super(StorageContentValidation, self).__init__()
@@ -342,10 +351,10 @@ class StorageContentValidation(SansIOHTTPPolicy):
         # Since HTTP does not differentiate between no content and empty content,
         # we have to perform a None check.
         data = data or b""
-        md5 = hashlib.md5() # nosec
+        md5 = hashlib.md5()  # nosec
         if isinstance(data, bytes):
             md5.update(data)
-        elif hasattr(data, 'read'):
+        elif hasattr(data, "read"):
             pos = 0
             try:
                 pos = data.tell()
@@ -363,22 +372,25 @@ class StorageContentValidation(SansIOHTTPPolicy):
         return md5.digest()

     def on_request(self, request: "PipelineRequest") -> None:
-        validate_content = request.context.options.pop('validate_content', False)
-        if validate_content and request.http_request.method != 'GET':
+        validate_content = request.context.options.pop("validate_content", False)
+        if validate_content and request.http_request.method != "GET":
             computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
             request.http_request.headers[self.header_name] = computed_md5
-            request.context['validate_content_md5'] = computed_md5
-        request.context['validate_content'] = validate_content
+            request.context["validate_content_md5"] = computed_md5
+        request.context["validate_content"] = validate_content

     def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None:
-        if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-            computed_md5 = request.context.get('validate_content_md5') or \
-                encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-            if response.http_response.headers['content-md5'] != computed_md5:
-                raise AzureError((
-                    f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', "
-                    f"computed value is '{computed_md5}'."),
-                    response=response.http_response
+        if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
+            computed_md5 = request.context.get("validate_content_md5") or encode_base64(
+                StorageContentValidation.get_content_md5(response.http_response.body())
+            )
+            if response.http_response.headers["content-md5"] != computed_md5:
+                raise AzureError(
+                    (
+                        f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', "
+                        f"computed value is '{computed_md5}'."
+                    ),
+                    response=response.http_response,
                 )


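Note: StorageContentValidation stamps a base64-encoded MD5 of the request body into Content-MD5 and, when validate_content is requested, checks the response's content-md5 the same way. A minimal sketch of the underlying computation, with assumed sample values:

import base64
import hashlib

def content_md5_b64(data: bytes) -> str:
    # Same shape as get_content_md5 + encode_base64 above, for a bytes payload.
    return base64.b64encode(hashlib.md5(data).digest()).decode("utf-8")  # nosec

body = b"example blob content"
service_headers = {"content-md5": content_md5_b64(body)}  # what a healthy response would carry
assert service_headers["content-md5"] == content_md5_b64(body)  # a mismatch would raise AzureError in the policy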
@@ -399,33 +411,41 @@ class StorageRetryPolicy(HTTPPolicy):
     """Whether the secondary endpoint should be retried."""

     def __init__(self, **kwargs: Any) -> None:
-        self.total_retries = kwargs.pop('retry_total', 10)
-        self.connect_retries = kwargs.pop('retry_connect', 3)
-        self.read_retries = kwargs.pop('retry_read', 3)
-        self.status_retries = kwargs.pop('retry_status', 3)
-        self.retry_to_secondary = kwargs.pop('retry_to_secondary', False)
+        self.total_retries = kwargs.pop("retry_total", 10)
+        self.connect_retries = kwargs.pop("retry_connect", 3)
+        self.read_retries = kwargs.pop("retry_read", 3)
+        self.status_retries = kwargs.pop("retry_status", 3)
+        self.retry_to_secondary = kwargs.pop("retry_to_secondary", False)
         super(StorageRetryPolicy, self).__init__()

     def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRequest") -> None:
         """
         A function which sets the next host location on the request, if applicable.

-        :param Dict[str, Any]] settings: The configurable values pertaining to the next host location.
+        :param Dict[str, Any] settings: The configurable values pertaining to the next host location.
         :param PipelineRequest request: A pipeline request object.
         """
-        if settings['hosts'] and all(settings['hosts'].values()):
+        if settings["hosts"] and all(settings["hosts"].values()):
             url = urlparse(request.url)
             # If there's more than one possible location, retry to the alternative
-            if settings['mode'] == LocationMode.PRIMARY:
-                settings['mode'] = LocationMode.SECONDARY
+            if settings["mode"] == LocationMode.PRIMARY:
+                settings["mode"] = LocationMode.SECONDARY
             else:
-                settings['mode'] = LocationMode.PRIMARY
-            updated = url._replace(netloc=settings['hosts'].get(settings['mode']))
+                settings["mode"] = LocationMode.PRIMARY
+            updated = url._replace(netloc=settings["hosts"].get(settings["mode"]))
             request.url = updated.geturl()

     def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]:
+        """
+        Configure the retry settings for the request.
+
+        :param request: A pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: A dictionary containing the retry settings.
+        :rtype: Dict[str, Any]
+        """
         body_position = None
-        if hasattr(request.http_request.body, 'read'):
+        if hasattr(request.http_request.body, "read"):
             try:
                 body_position = request.http_request.body.tell()
             except (AttributeError, UnsupportedOperation):
@@ -433,129 +453,140 @@ class StorageRetryPolicy(HTTPPolicy):
                 pass
         options = request.context.options
         return {
-            'total': options.pop("retry_total", self.total_retries),
-            'connect': options.pop("retry_connect", self.connect_retries),
-            'read': options.pop("retry_read", self.read_retries),
-            'status': options.pop("retry_status", self.status_retries),
-            'retry_secondary': options.pop("retry_to_secondary", self.retry_to_secondary),
-            'mode': options.pop("location_mode", LocationMode.PRIMARY),
-            'hosts': options.pop("hosts", None),
-            'hook': options.pop("retry_hook", None),
-            'body_position': body_position,
-            'count': 0,
-            'history': []
+            "total": options.pop("retry_total", self.total_retries),
+            "connect": options.pop("retry_connect", self.connect_retries),
+            "read": options.pop("retry_read", self.read_retries),
+            "status": options.pop("retry_status", self.status_retries),
+            "retry_secondary": options.pop("retry_to_secondary", self.retry_to_secondary),
+            "mode": options.pop("location_mode", LocationMode.PRIMARY),
+            "hosts": options.pop("hosts", None),
+            "hook": options.pop("retry_hook", None),
+            "body_position": body_position,
+            "count": 0,
+            "history": [],
         }

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:  # pylint: disable=unused-argument
-        """ Formula for computing the current backoff.
+        """Formula for computing the current backoff.
         Should be calculated by child class.

         :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
-        :returns: The backoff time.
+        :return: The backoff time.
         :rtype: float
         """
         return 0

     def sleep(self, settings, transport):
+        """Sleep for the backoff time.
+
+        :param Dict[str, Any] settings: The configurable values pertaining to the sleep operation.
+        :param transport: The transport to use for sleeping.
+        :type transport:
+            ~azure.core.pipeline.transport.AsyncioBaseTransport or
+            ~azure.core.pipeline.transport.BaseTransport
+        """
         backoff = self.get_backoff_time(settings)
         if not backoff or backoff < 0:
             return
         transport.sleep(backoff)

     def increment(
-        self, settings: Dict[str, Any],
+        self,
+        settings: Dict[str, Any],
         request: "PipelineRequest",
         response: Optional["PipelineResponse"] = None,
-        error: Optional[AzureError] = None
+        error: Optional[AzureError] = None,
     ) -> bool:
         """Increment the retry counters.

         :param Dict[str, Any] settings: The configurable values pertaining to the increment operation.
-        :param PipelineRequest request: A pipeline request object.
-        :param Optional[PipelineResponse] response: A pipeline response object.
-        :param Optional[AzureError] error: An error encountered during the request, or
+        :param request: A pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: A pipeline response object.
+        :type response: ~azure.core.pipeline.PipelineResponse or None
+        :param error: An error encountered during the request, or
             None if the response was received successfully.
-        :returns: Whether the retry attempts are exhausted.
+        :type error: ~azure.core.exceptions.AzureError or None
+        :return: Whether the retry attempts are exhausted.
         :rtype: bool
         """
-        settings['total'] -= 1
+        settings["total"] -= 1

         if error and isinstance(error, ServiceRequestError):
             # Errors when we're fairly sure that the server did not receive the
             # request, so it should be safe to retry.
-            settings['connect'] -= 1
-            settings['history'].append(RequestHistory(request, error=error))
+            settings["connect"] -= 1
+            settings["history"].append(RequestHistory(request, error=error))

         elif error and isinstance(error, ServiceResponseError):
             # Errors that occur after the request has been started, so we should
             # assume that the server began processing it.
-            settings['read'] -= 1
-            settings['history'].append(RequestHistory(request, error=error))
+            settings["read"] -= 1
+            settings["history"].append(RequestHistory(request, error=error))

         else:
             # Incrementing because of a server error like a 500 in
             # status_forcelist and a the given method is in the allowlist
             if response:
-                settings['status'] -= 1
-                settings['history'].append(RequestHistory(request, http_response=response))
+                settings["status"] -= 1
+                settings["history"].append(RequestHistory(request, http_response=response))

         if not is_exhausted(settings):
-            if request.method not in ['PUT'] and settings['retry_secondary']:
+            if request.method not in ["PUT"] and settings["retry_secondary"]:
                 self._set_next_host_location(settings, request)

             # rewind the request body if it is a stream
-            if request.body and hasattr(request.body, 'read'):
+            if request.body and hasattr(request.body, "read"):
                 # no position was saved, then retry would not work
-                if settings['body_position'] is None:
+                if settings["body_position"] is None:
                     return False
                 try:
                     # attempt to rewind the body to the initial position
-                    request.body.seek(settings['body_position'], SEEK_SET)
+                    request.body.seek(settings["body_position"], SEEK_SET)
                 except (UnsupportedOperation, ValueError):
                     # if body is not seekable, then retry would not work
                     return False
-            settings['count'] += 1
+            settings["count"] += 1
             return True
         return False

     def send(self, request):
+        """Send the request with retry logic.
+
+        :param request: A pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: A pipeline response object.
+        :rtype: ~azure.core.pipeline.PipelineResponse
+        """
         retries_remaining = True
         response = None
         retry_settings = self.configure_retries(request)
         while retries_remaining:
             try:
                 response = self.next.send(request)
-                if is_retry(response, retry_settings['mode']) or is_checksum_retry(response):
+                if is_retry(response, retry_settings["mode"]) or is_checksum_retry(response):
                     retries_remaining = self.increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response)
+                        retry_settings, request=request.http_request, response=response.http_response
+                    )
                     if retries_remaining:
                         retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None)
+                            retry_settings, request=request.http_request, response=response.http_response, error=None
+                        )
                         self.sleep(retry_settings, request.context.transport)
                         continue
                 break
             except AzureError as err:
                 if isinstance(err, AzureSigningError):
                     raise
-                retries_remaining = self.increment(
-                    retry_settings, request=request.http_request, error=err)
+                retries_remaining = self.increment(retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
-                    retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err)
+                    retry_hook(retry_settings, request=request.http_request, response=None, error=err)
                     self.sleep(retry_settings, request.context.transport)
                     continue
                 raise err
-        if retry_settings['history']:
-            response.context['history'] = retry_settings['history']
-        response.http_response.location_mode = retry_settings['mode']
+        if retry_settings["history"]:
+            response.context["history"] = retry_settings["history"]
+        response.http_response.location_mode = retry_settings["mode"]
         return response


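Note: the send() method above is the core retry algorithm: attempt the request, decide retryability, decrement the relevant counter, optionally fail over to the secondary host, sleep with backoff, and repeat. A schematic, library-free sketch of that control flow (the names and the ConnectionError stand-in are illustrative, not SDK code):

import random
import time

def send_with_retries(do_request, is_retryable, max_attempts=3, base_backoff=1.0):
    attempt = 0
    while True:
        try:
            response = do_request()
            if not is_retryable(response) or attempt + 1 >= max_attempts:
                return response
        except ConnectionError:
            # Connection-level failures consume an attempt as well.
            if attempt + 1 >= max_attempts:
                raise
        attempt += 1
        time.sleep(base_backoff + random.uniform(0, 1))  # jittered pause, like get_backoff_time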
@@ -571,12 +602,13 @@ class ExponentialRetry(StorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""

     def __init__(
-        self, initial_backoff: int = 15,
+        self,
+        initial_backoff: int = 15,
         increment_base: int = 3,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         """
         Constructs an Exponential retry object. The initial_backoff is used for
@@ -601,21 +633,20 @@ class ExponentialRetry(StorageRetryPolicy):
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(ExponentialRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]] settings: The configurable values pertaining to get backoff time.
-        :returns:
+        :param Dict[str, Any] settings: The configurable values pertaining to get backoff time.
+        :return:
             A float indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: float
         """
         random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        backoff = self.initial_backoff + (0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]))
         random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
         random_range_end = backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)
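Note: with the defaults shown (initial_backoff=15, increment_base=3, random_jitter_range=3) the formula above yields roughly 15s, 18s, 24s, 42s before jitter is applied. A small worked sketch of the same arithmetic (standalone, not SDK code):

import random

def exponential_backoff(count, initial_backoff=15, increment_base=3, jitter=3):
    backoff = initial_backoff + (0 if count == 0 else pow(increment_base, count))
    start = backoff - jitter if backoff > jitter else 0
    return random.uniform(start, backoff + jitter)

for count in range(4):  # ~15, ~18, ~24, ~42 seconds, each +/- 3s of jitter
    print(round(exponential_backoff(count), 1))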
@@ -630,11 +661,12 @@ class LinearRetry(StorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""

     def __init__(
-        self, backoff: int = 15,
+        self,
+        backoff: int = 15,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         """
         Constructs a Linear retry object.
@@ -653,15 +685,14 @@ class LinearRetry(StorageRetryPolicy):
         """
         self.backoff = backoff
         self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(LinearRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
-        :returns:
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
+        :return:
             A float indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: float
@@ -669,19 +700,27 @@ class LinearRetry(StorageRetryPolicy):
         random_generator = random.Random()
         # the backoff interval normally does not change, however there is the possibility
         # that it was modified by accessing the property directly after initializing the object
-        random_range_start = self.backoff - self.random_jitter_range \
-            if self.backoff > self.random_jitter_range else 0
+        random_range_start = self.backoff - self.random_jitter_range if self.backoff > self.random_jitter_range else 0
         random_range_end = self.backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)


 class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy):
-    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+    """Custom Bearer token credential policy for following Storage Bearer challenges"""

     def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) -> None:
         super(StorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)

     def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool:
+        """Handle the challenge from the service and authorize the request.
+
+        :param request: The request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: The response object.
+        :type response: ~azure.core.pipeline.PipelineResponse
+        :return: True if the request was authorized, False otherwise.
+        :rtype: bool
+        """
         try:
             auth_header = response.http_response.headers.get("WWW-Authenticate")
             challenge = StorageHttpChallenge(auth_header)