azure-storage-blob 12.25.0b1__py3-none-any.whl → 12.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. azure/storage/blob/__init__.py +3 -2
  2. azure/storage/blob/_blob_client.py +94 -41
  3. azure/storage/blob/_blob_client_helpers.py +19 -4
  4. azure/storage/blob/_blob_service_client.py +16 -13
  5. azure/storage/blob/_container_client.py +25 -22
  6. azure/storage/blob/_deserialize.py +1 -1
  7. azure/storage/blob/_download.py +7 -7
  8. azure/storage/blob/_encryption.py +177 -184
  9. azure/storage/blob/_generated/_azure_blob_storage.py +1 -1
  10. azure/storage/blob/_generated/_configuration.py +2 -2
  11. azure/storage/blob/_generated/_serialization.py +3 -3
  12. azure/storage/blob/_generated/aio/_azure_blob_storage.py +1 -1
  13. azure/storage/blob/_generated/aio/_configuration.py +2 -2
  14. azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +5 -4
  15. azure/storage/blob/_generated/aio/operations/_blob_operations.py +5 -25
  16. azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -7
  17. azure/storage/blob/_generated/aio/operations/_container_operations.py +1 -19
  18. azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +5 -10
  19. azure/storage/blob/_generated/aio/operations/_service_operations.py +1 -8
  20. azure/storage/blob/_generated/models/__init__.py +2 -0
  21. azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +6 -0
  22. azure/storage/blob/_generated/operations/_append_blob_operations.py +12 -9
  23. azure/storage/blob/_generated/operations/_blob_operations.py +32 -49
  24. azure/storage/blob/_generated/operations/_block_blob_operations.py +21 -13
  25. azure/storage/blob/_generated/operations/_container_operations.py +19 -37
  26. azure/storage/blob/_generated/operations/_page_blob_operations.py +17 -19
  27. azure/storage/blob/_generated/operations/_service_operations.py +9 -17
  28. azure/storage/blob/_lease.py +1 -0
  29. azure/storage/blob/_quick_query_helper.py +20 -24
  30. azure/storage/blob/_serialize.py +1 -0
  31. azure/storage/blob/_shared/__init__.py +7 -7
  32. azure/storage/blob/_shared/authentication.py +49 -32
  33. azure/storage/blob/_shared/avro/avro_io.py +45 -43
  34. azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
  35. azure/storage/blob/_shared/avro/datafile.py +24 -21
  36. azure/storage/blob/_shared/avro/datafile_async.py +15 -15
  37. azure/storage/blob/_shared/avro/schema.py +196 -217
  38. azure/storage/blob/_shared/base_client.py +87 -61
  39. azure/storage/blob/_shared/base_client_async.py +58 -51
  40. azure/storage/blob/_shared/constants.py +1 -1
  41. azure/storage/blob/_shared/models.py +93 -92
  42. azure/storage/blob/_shared/parser.py +3 -3
  43. azure/storage/blob/_shared/policies.py +176 -145
  44. azure/storage/blob/_shared/policies_async.py +59 -70
  45. azure/storage/blob/_shared/request_handlers.py +51 -47
  46. azure/storage/blob/_shared/response_handlers.py +49 -45
  47. azure/storage/blob/_shared/shared_access_signature.py +67 -71
  48. azure/storage/blob/_shared/uploads.py +56 -49
  49. azure/storage/blob/_shared/uploads_async.py +72 -61
  50. azure/storage/blob/_shared_access_signature.py +3 -1
  51. azure/storage/blob/_version.py +1 -1
  52. azure/storage/blob/aio/__init__.py +3 -2
  53. azure/storage/blob/aio/_blob_client_async.py +241 -44
  54. azure/storage/blob/aio/_blob_service_client_async.py +13 -11
  55. azure/storage/blob/aio/_container_client_async.py +28 -25
  56. azure/storage/blob/aio/_download_async.py +16 -12
  57. azure/storage/blob/aio/_lease_async.py +1 -0
  58. azure/storage/blob/aio/_quick_query_helper_async.py +194 -0
  59. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/METADATA +7 -7
  60. azure_storage_blob-12.26.0.dist-info/RECORD +85 -0
  61. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/WHEEL +1 -1
  62. azure_storage_blob-12.25.0b1.dist-info/RECORD +0 -84
  63. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/LICENSE +0 -0
  64. {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/top_level.txt +0 -0

azure/storage/blob/_shared/policies_async.py
@@ -5,7 +5,7 @@
 # --------------------------------------------------------------------------
 # pylint: disable=invalid-overridden-method

-import asyncio
+import asyncio  # pylint: disable=do-not-import-asyncio
 import logging
 import random
 from typing import Any, Dict, TYPE_CHECKING
@@ -21,7 +21,7 @@ if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential
     from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
         PipelineRequest,
-        PipelineResponse
+        PipelineResponse,
     )

@@ -29,29 +29,25 @@ _LOGGER = logging.getLogger(__name__)


 async def retry_hook(settings, **kwargs):
-    if settings['hook']:
-        if asyncio.iscoroutine(settings['hook']):
-            await settings['hook'](
-                retry_count=settings['count'] - 1,
-                location_mode=settings['mode'],
-                **kwargs)
+    if settings["hook"]:
+        if asyncio.iscoroutine(settings["hook"]):
+            await settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)
         else:
-            settings['hook'](
-                retry_count=settings['count'] - 1,
-                location_mode=settings['mode'],
-                **kwargs)
+            settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)


 async def is_checksum_retry(response):
     # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-        try:
-            await response.http_response.load_body()  # Load the body in memory and close the socket
-        except (StreamClosedError, StreamConsumedError):
-            pass
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+    if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
+        if hasattr(response.http_response, "load_body"):
+            try:
+                await response.http_response.load_body()  # Load the body in memory and close the socket
+            except (StreamClosedError, StreamConsumedError):
+                pass
+        computed_md5 = response.http_request.headers.get("content-md5", None) or encode_base64(
+            StorageContentValidation.get_content_md5(response.http_response.body())
+        )
+        if response.http_response.headers["content-md5"] != computed_md5:
             return True
     return False

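Note: the new hasattr guard lets the checksum retry work with transports whose responses have no load_body method. The comparison itself is the service's Content-MD5 contract; a minimal sketch of the same check, using hashlib and base64 in place of the internal StorageContentValidation helper:

    import base64
    import hashlib

    def body_matches_content_md5(body: bytes, content_md5_header: str) -> bool:
        # The content-md5 header carries the base64-encoded MD5 digest of the payload.
        computed = base64.b64encode(hashlib.md5(body).digest()).decode("utf-8")
        return computed == content_md5_header

A mismatch means the body was corrupted in transit, so the request is retried.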
@@ -59,54 +55,56 @@ async def is_checksum_retry(response):

 class AsyncStorageResponseHook(AsyncHTTPPolicy):

     def __init__(self, **kwargs):
-        self._response_callback = kwargs.get('raw_response_hook')
+        self._response_callback = kwargs.get("raw_response_hook")
         super(AsyncStorageResponseHook, self).__init__()

     async def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
-        data_stream_total = request.context.get('data_stream_total')
+        data_stream_total = request.context.get("data_stream_total")
         if data_stream_total is None:
-            data_stream_total = request.context.options.pop('data_stream_total', None)
-        download_stream_current = request.context.get('download_stream_current')
+            data_stream_total = request.context.options.pop("data_stream_total", None)
+        download_stream_current = request.context.get("download_stream_current")
         if download_stream_current is None:
-            download_stream_current = request.context.options.pop('download_stream_current', None)
-        upload_stream_current = request.context.get('upload_stream_current')
+            download_stream_current = request.context.options.pop("download_stream_current", None)
+        upload_stream_current = request.context.get("upload_stream_current")
         if upload_stream_current is None:
-            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+            upload_stream_current = request.context.options.pop("upload_stream_current", None)

-        response_callback = request.context.get('response_callback') or \
-            request.context.options.pop('raw_response_hook', self._response_callback)
+        response_callback = request.context.get("response_callback") or request.context.options.pop(
+            "raw_response_hook", self._response_callback
+        )

         response = await self.next.send(request)
-        will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response)
+        will_retry = is_retry(response, request.context.options.get("mode")) or await is_checksum_retry(response)

         # Auth error could come from Bearer challenge, in which case this request will be made again
         is_auth_error = response.http_response.status_code == 401
         should_update_counts = not (will_retry or is_auth_error)

         if should_update_counts and download_stream_current is not None:
-            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            download_stream_current += int(response.http_response.headers.get("Content-Length", 0))
             if data_stream_total is None:
-                content_range = response.http_response.headers.get('Content-Range')
+                content_range = response.http_response.headers.get("Content-Range")
                 if content_range:
-                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                    data_stream_total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
                 else:
                     data_stream_total = download_stream_current
         elif should_update_counts and upload_stream_current is not None:
-            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+            upload_stream_current += int(response.http_request.headers.get("Content-Length", 0))
         for pipeline_obj in [request, response]:
-            if hasattr(pipeline_obj, 'context'):
-                pipeline_obj.context['data_stream_total'] = data_stream_total
-                pipeline_obj.context['download_stream_current'] = download_stream_current
-                pipeline_obj.context['upload_stream_current'] = upload_stream_current
+            if hasattr(pipeline_obj, "context"):
+                pipeline_obj.context["data_stream_total"] = data_stream_total
+                pipeline_obj.context["download_stream_current"] = download_stream_current
+                pipeline_obj.context["upload_stream_current"] = upload_stream_current
         if response_callback:
             if asyncio.iscoroutine(response_callback):
-                await response_callback(response) # type: ignore
+                await response_callback(response)  # type: ignore
             else:
                 response_callback(response)
-            request.context['response_callback'] = response_callback
+            request.context["response_callback"] = response_callback
         return response

+
 class AsyncStorageRetryPolicy(StorageRetryPolicy):
     """
     The base class for Exponential and Linear retries containing shared code.
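Note: AsyncStorageResponseHook dispatches on asyncio.iscoroutine, so raw_response_hook may be a plain callable. A hedged usage sketch (the account URL and credential are placeholders, not values from this diff):

    from azure.storage.blob.aio import BlobServiceClient

    def on_response(pipeline_response):
        # Receives the raw PipelineResponse after every service call.
        print("status:", pipeline_response.http_response.status_code)

    service = BlobServiceClient(
        "https://<account>.blob.core.windows.net",  # placeholder account URL
        credential=credential,                      # any supported credential, assumed defined
        raw_response_hook=on_response,
    )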
@@ -125,37 +123,29 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
         while retries_remaining:
             try:
                 response = await self.next.send(request)
-                if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response):
+                if is_retry(response, retry_settings["mode"]) or await is_checksum_retry(response):
                     retries_remaining = self.increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response)
+                        retry_settings, request=request.http_request, response=response.http_response
+                    )
                     if retries_remaining:
                         await retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None)
+                            retry_settings, request=request.http_request, response=response.http_response, error=None
+                        )
                         await self.sleep(retry_settings, request.context.transport)
                         continue
                 break
             except AzureError as err:
                 if isinstance(err, AzureSigningError):
                     raise
-                retries_remaining = self.increment(
-                    retry_settings, request=request.http_request, error=err)
+                retries_remaining = self.increment(retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
-                    await retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err)
+                    await retry_hook(retry_settings, request=request.http_request, response=None, error=err)
                     await self.sleep(retry_settings, request.context.transport)
                     continue
                 raise err
-        if retry_settings['history']:
-            response.context['history'] = retry_settings['history']
-        response.http_response.location_mode = retry_settings['mode']
+        if retry_settings["history"]:
+            response.context["history"] = retry_settings["history"]
+        response.http_response.location_mode = retry_settings["mode"]
         return response

@@ -176,7 +166,8 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         increment_base: int = 3,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
-        random_jitter_range: int = 3, **kwargs
+        random_jitter_range: int = 3,
+        **kwargs
     ) -> None:
         """
         Constructs an Exponential retry object. The initial_backoff is used for
@@ -203,21 +194,20 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(ExponentialRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: int or None
         """
         random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        backoff = self.initial_backoff + (0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]))
         random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
         random_range_end = backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)
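Note: with the defaults above (initial_backoff=15, increment_base=3), the un-jittered delays are 15, 18, 24, 42, ... seconds for retry counts 0, 1, 2, 3. A standalone sketch of the same formula:

    import random

    def exponential_backoff(count, initial_backoff=15, increment_base=3, jitter=3):
        # Fixed base plus an exponential term, then jittered by +/- `jitter` seconds.
        backoff = initial_backoff + (0 if count == 0 else increment_base ** count)
        start = backoff - jitter if backoff > jitter else 0
        return random.Random().uniform(start, backoff + jitter)

    print([round(exponential_backoff(n)) for n in range(4)])  # roughly [15, 18, 24, 42]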
@@ -232,7 +222,8 @@ class LinearRetry(AsyncStorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""

     def __init__(
-        self, backoff: int = 15,
+        self,
+        backoff: int = 15,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
@@ -255,14 +246,13 @@ class LinearRetry(AsyncStorageRetryPolicy):
         """
         self.backoff = backoff
         self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(LinearRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
@@ -271,14 +261,13 @@ class LinearRetry(AsyncStorageRetryPolicy):
         random_generator = random.Random()
         # the backoff interval normally does not change, however there is the possibility
         # that it was modified by accessing the property directly after initializing the object
-        random_range_start = self.backoff - self.random_jitter_range \
-            if self.backoff > self.random_jitter_range else 0
+        random_range_start = self.backoff - self.random_jitter_range if self.backoff > self.random_jitter_range else 0
         random_range_end = self.backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)


 class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
-    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+    """Custom Bearer token credential policy for following Storage Bearer challenges"""

     def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None:
         super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)
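Note: either retry flavor is normally supplied when the client is built; the shared base client falls back to ExponentialRetry(**kwargs) when no retry_policy is given. A sketch, assuming ExponentialRetry is still re-exported from azure.storage.blob.aio as in prior releases, with account_url and credential defined elsewhere:

    from azure.storage.blob.aio import BlobServiceClient, ExponentialRetry

    # Retry up to 5 times, starting from a 10-second backoff, and allow
    # falling back to the secondary (RA-GRS) endpoint on retries.
    retry = ExponentialRetry(initial_backoff=10, retry_total=5, retry_to_secondary=True)
    service = BlobServiceClient(account_url, credential=credential, retry_policy=retry)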

azure/storage/blob/_shared/request_handlers.py
@@ -6,7 +6,7 @@

 import logging
 import stat
-from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
+from io import SEEK_END, SEEK_SET, UnsupportedOperation
 from os import fstat
 from typing import Dict, Optional

@@ -37,12 +37,13 @@ def serialize_iso(attr):
             raise OverflowError("Hit max or min date")

         date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}"
-        return date + 'Z'
+        return date + "Z"
     except (ValueError, OverflowError) as err:
         raise ValueError("Unable to serialize datetime object.") from err
     except AttributeError as err:
         raise TypeError("ISO-8601 object must be valid datetime object.") from err

+
 def get_length(data):
     length = None
     # Check if object implements the __len__ method, covers most input cases such as bytearray.
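Note: serialize_iso emits a second-precision UTC timestamp with a literal "Z" suffix. The same formatting, reproduced standalone:

    from datetime import datetime, timezone

    utc = datetime(2024, 5, 1, 12, 30, 15, tzinfo=timezone.utc).utctimetuple()
    print(f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}"
          f"T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}Z")
    # -> 2024-05-01T12:30:15Z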
@@ -62,7 +63,7 @@ def get_length(data):
         try:
             mode = fstat(fileno).st_mode
             if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
-                #st_size only meaningful if regular file or symlink, other types
+                # st_size only meaningful if regular file or symlink, other types
                 # e.g. sockets may return misleading sizes like 0
                 return fstat(fileno).st_size
         except OSError:
@@ -84,13 +85,13 @@ def get_length(data):

 def read_length(data):
     try:
-        if hasattr(data, 'read'):
-            read_data = b''
+        if hasattr(data, "read"):
+            read_data = b""
             for chunk in iter(lambda: data.read(4096), b""):
                 read_data += chunk
             return len(read_data), read_data
-        if hasattr(data, '__iter__'):
-            read_data = b''
+        if hasattr(data, "__iter__"):
+            read_data = b""
             for chunk in data:
                 read_data += chunk
             return len(read_data), read_data
@@ -100,8 +101,13 @@ def read_length(data):


 def validate_and_format_range_headers(
-    start_range, end_range, start_range_required=True,
-    end_range_required=True, check_content_md5=False, align_to_page=False):
+    start_range,
+    end_range,
+    start_range_required=True,
+    end_range_required=True,
+    check_content_md5=False,
+    align_to_page=False,
+):
     # If end range is provided, start range must be provided
     if (start_range_required or end_range is not None) and start_range is None:
         raise ValueError("start_range value cannot be None.")
@@ -111,16 +117,18 @@ def validate_and_format_range_headers(
     # Page ranges must be 512 aligned
     if align_to_page:
         if start_range is not None and start_range % 512 != 0:
-            raise ValueError(f"Invalid page blob start_range: {start_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
+            raise ValueError(
+                f"Invalid page blob start_range: {start_range}. " "The size must be aligned to a 512-byte boundary."
+            )
         if end_range is not None and end_range % 512 != 511:
-            raise ValueError(f"Invalid page blob end_range: {end_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
+            raise ValueError(
+                f"Invalid page blob end_range: {end_range}. " "The size must be aligned to a 512-byte boundary."
+            )

     # Format based on whether end_range is present
     range_header = None
     if end_range is not None:
-        range_header = f'bytes={start_range}-{end_range}'
+        range_header = f"bytes={start_range}-{end_range}"
     elif start_range is not None:
         range_header = f"bytes={start_range}-"

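Note: the header follows the RFC 7233 bytes=start-end form, and page-blob ranges must cover whole 512-byte pages (start % 512 == 0, end % 512 == 511). A quick illustration:

    start, end = 0, 511            # the first page of a page blob
    assert start % 512 == 0 and end % 512 == 511
    print(f"bytes={start}-{end}")  # -> bytes=0-511
    # end=512 would be rejected: the range would stop mid-page (512 % 512 != 511).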
@@ -131,17 +139,16 @@ def validate_and_format_range_headers(
             raise ValueError("Both start and end range required for MD5 content validation.")
         if end_range - start_range > 4 * 1024 * 1024:
             raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
-        range_validation = 'true'
+        range_validation = "true"

     return range_header, range_validation


-def add_metadata_headers(metadata=None):
-    # type: (Optional[Dict[str, str]]) -> Dict[str, str]
+def add_metadata_headers(metadata: Optional[Dict[str, str]] = None) -> Dict[str, str]:
     headers = {}
     if metadata:
         for key, value in metadata.items():
-            headers[f'x-ms-meta-{key.strip()}'] = value.strip() if value else value
+            headers[f"x-ms-meta-{key.strip()}"] = value.strip() if value else value
     return headers

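Note: user metadata travels as x-ms-meta-* request headers; the newly annotated helper reduces to this mapping:

    metadata = {"project": "demo", "owner": " storage-team "}
    headers = {f"x-ms-meta-{k.strip()}": v.strip() if v else v for k, v in metadata.items()}
    # -> {'x-ms-meta-project': 'demo', 'x-ms-meta-owner': 'storage-team'}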
@@ -159,29 +166,26 @@ def serialize_batch_body(requests, batch_id):
         a list of sub-request for the batch request
     :param str batch_id:
         to be embedded in batch sub-request delimiter
-    :returns: The body bytes for this batch.
+    :return: The body bytes for this batch.
     :rtype: bytes
     """

     if requests is None or len(requests) == 0:
-        raise ValueError('Please provide sub-request(s) for this batch request')
+        raise ValueError("Please provide sub-request(s) for this batch request")

-    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8')
-    newline_bytes = _HTTP_LINE_ENDING.encode('utf-8')
+    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode("utf-8")
+    newline_bytes = _HTTP_LINE_ENDING.encode("utf-8")
     batch_body = []

     content_index = 0
     for request in requests:
-        request.headers.update({
-            "Content-ID": str(content_index),
-            "Content-Length": str(0)
-        })
+        request.headers.update({"Content-ID": str(content_index), "Content-Length": str(0)})
         batch_body.append(delimiter_bytes)
         batch_body.append(_make_body_from_sub_request(request))
         batch_body.append(newline_bytes)
         content_index += 1

-    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8'))
+    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode("utf-8"))
     # final line of body MUST have \r\n at the end, or it will not be properly read by the service
     batch_body.append(newline_bytes)

@@ -198,35 +202,35 @@ def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False):
         Whether to include the starting dashes. Used in the body, but non on defining the delimiter.
     :param bool is_append_dashes:
         Whether to include the ending dashes. Used in the body on the closing delimiter only.
-    :returns: The delimiter, WITHOUT a trailing newline.
+    :return: The delimiter, WITHOUT a trailing newline.
     :rtype: str
     """

-    prepend_dashes = '--' if is_prepend_dashes else ''
-    append_dashes = '--' if is_append_dashes else ''
+    prepend_dashes = "--" if is_prepend_dashes else ""
+    append_dashes = "--" if is_append_dashes else ""

     return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes


 def _make_body_from_sub_request(sub_request):
     """
-    Content-Type: application/http
-    Content-ID: <sequential int ID>
-    Content-Transfer-Encoding: <value> (if present)
+     Content-Type: application/http
+     Content-ID: <sequential int ID>
+     Content-Transfer-Encoding: <value> (if present)

-    <verb> <path><query> HTTP/<version>
-    <header key>: <header value> (repeated as necessary)
-    Content-Length: <value>
-    (newline if content length > 0)
-    <body> (if content length > 0)
+     <verb> <path><query> HTTP/<version>
+     <header key>: <header value> (repeated as necessary)
+     Content-Length: <value>
+     (newline if content length > 0)
+     <body> (if content length > 0)

-    Serializes an http request.
+     Serializes an http request.

-    :param ~azure.core.pipeline.transport.HttpRequest sub_request:
-        Request to serialize.
-    :returns: The serialized sub-request in bytes
-    :rtype: bytes
-    """
+     :param ~azure.core.pipeline.transport.HttpRequest sub_request:
+        Request to serialize.
+     :return: The serialized sub-request in bytes
+     :rtype: bytes
+    """

     # put the sub-request's headers into a list for efficient str concatenation
     sub_request_body = []
@@ -250,9 +254,9 @@ def _make_body_from_sub_request(sub_request):

     # append HTTP verb and path and query and HTTP version
     sub_request_body.append(sub_request.method)
-    sub_request_body.append(' ')
+    sub_request_body.append(" ")
     sub_request_body.append(sub_request.url)
-    sub_request_body.append(' ')
+    sub_request_body.append(" ")
     sub_request_body.append(_HTTP1_1_IDENTIFIER)
     sub_request_body.append(_HTTP_LINE_ENDING)

@@ -267,4 +271,4 @@ def _make_body_from_sub_request(sub_request):
     # append blank line
     sub_request_body.append(_HTTP_LINE_ENDING)

-    return ''.join(sub_request_body).encode()
+    return "".join(sub_request_body).encode()
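Note: these delimiters frame each sub-request in a blob batch body, and the same boundary (without leading dashes) goes into the request's multipart Content-Type. A sketch assuming the usual "batch_" value for _REQUEST_DELIMITER_PREFIX:

    def batch_delimiter(batch_id: str, prepend: bool = False, append: bool = False) -> str:
        prefix = "--" if prepend else ""
        suffix = "--" if append else ""
        return f"{prefix}batch_{batch_id}{suffix}"    # assumes the "batch_" prefix

    boundary = batch_delimiter("a1b2c3")              # Content-Type boundary value
    opening = batch_delimiter("a1b2c3", True)         # precedes each sub-request
    closing = batch_delimiter("a1b2c3", True, True)   # terminates the body (plus CRLF)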

azure/storage/blob/_shared/response_handlers.py
@@ -46,23 +46,23 @@ def parse_length_from_content_range(content_range):
     # First, split in space and take the second half: '1-3/65537'
     # Next, split on slash and take the second half: '65537'
     # Finally, convert to an int: 65537
-    return int(content_range.split(' ', 1)[1].split('/', 1)[1])
+    return int(content_range.split(" ", 1)[1].split("/", 1)[1])


 def normalize_headers(headers):
     normalized = {}
     for key, value in headers.items():
-        if key.startswith('x-ms-'):
+        if key.startswith("x-ms-"):
             key = key[5:]
-        normalized[key.lower().replace('-', '_')] = get_enum_value(value)
+        normalized[key.lower().replace("-", "_")] = get_enum_value(value)
     return normalized


 def deserialize_metadata(response, obj, headers):  # pylint: disable=unused-argument
     try:
-        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith('x-ms-meta-')}
+        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith("x-ms-meta-")}
     except AttributeError:
-        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith('x-ms-meta-')}
+        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith("x-ms-meta-")}
     return {k[10:]: v for k, v in raw_metadata.items()}

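Note: the parse walks a Content-Range header such as 'bytes 1-3/65537' down to the total resource length, exactly as the comments describe:

    content_range = "bytes 1-3/65537"
    total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
    assert total == 65537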
@@ -82,19 +82,23 @@ def return_raw_deserialized(response, *_):
     return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME]


-def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
+def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
     raise_error = HttpResponseError
     serialized = False
     if isinstance(storage_error, AzureSigningError):
-        storage_error.message = storage_error.message + \
-            '. This is likely due to an invalid shared key. Please check your shared key and try again.'
+        storage_error.message = (
+            storage_error.message
+            + ". This is likely due to an invalid shared key. Please check your shared key and try again."
+        )
     if not storage_error.response or storage_error.response.status_code in [200, 204]:
         raise storage_error
     # If it is one of those three then it has been serialized prior by the generated layer.
-    if isinstance(storage_error, (PartialBatchErrorException,
-                                  ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)):
+    if isinstance(
+        storage_error,
+        (PartialBatchErrorException, ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError),
+    ):
         serialized = True
-    error_code = storage_error.response.headers.get('x-ms-error-code')
+    error_code = storage_error.response.headers.get("x-ms-error-code")
     error_message = storage_error.message
     additional_data = {}
     error_dict = {}
@@ -104,27 +108,25 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
             if error_body is None or len(error_body) == 0:
                 error_body = storage_error.response.reason
         except AttributeError:
-            error_body = ''
+            error_body = ""
         # If it is an XML response
         if isinstance(error_body, Element):
-            error_dict = {
-                child.tag.lower(): child.text
-                for child in error_body
-            }
+            error_dict = {child.tag.lower(): child.text for child in error_body}
         # If it is a JSON response
         elif isinstance(error_body, dict):
-            error_dict = error_body.get('error', {})
+            error_dict = error_body.get("error", {})
         elif not error_code:
             _LOGGER.warning(
-                'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body))
-            error_dict = {'message': str(error_body)}
+                "Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.", type(error_body)
+            )
+            error_dict = {"message": str(error_body)}

         # If we extracted from a Json or XML response
         # There is a chance error_dict is just a string
         if error_dict and isinstance(error_dict, dict):
-            error_code = error_dict.get('code')
-            error_message = error_dict.get('message')
-            additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}}
+            error_code = error_dict.get("code")
+            error_message = error_dict.get("message")
+            additional_data = {k: v for k, v in error_dict.items() if k not in {"code", "message"}}
     except DecodeError:
         pass

@@ -132,31 +134,33 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
         # This check would be unnecessary if we have already serialized the error
         if error_code and not serialized:
             error_code = StorageErrorCode(error_code)
-            if error_code in [StorageErrorCode.condition_not_met,
-                              StorageErrorCode.blob_overwritten]:
+            if error_code in [StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten]:
                 raise_error = ResourceModifiedError
-            if error_code in [StorageErrorCode.invalid_authentication_info,
-                              StorageErrorCode.authentication_failed]:
+            if error_code in [StorageErrorCode.invalid_authentication_info, StorageErrorCode.authentication_failed]:
                 raise_error = ClientAuthenticationError
-            if error_code in [StorageErrorCode.resource_not_found,
-                              StorageErrorCode.cannot_verify_copy_source,
-                              StorageErrorCode.blob_not_found,
-                              StorageErrorCode.queue_not_found,
-                              StorageErrorCode.container_not_found,
-                              StorageErrorCode.parent_not_found,
-                              StorageErrorCode.share_not_found]:
+            if error_code in [
+                StorageErrorCode.resource_not_found,
+                StorageErrorCode.cannot_verify_copy_source,
+                StorageErrorCode.blob_not_found,
+                StorageErrorCode.queue_not_found,
+                StorageErrorCode.container_not_found,
+                StorageErrorCode.parent_not_found,
+                StorageErrorCode.share_not_found,
+            ]:
                 raise_error = ResourceNotFoundError
-            if error_code in [StorageErrorCode.account_already_exists,
-                              StorageErrorCode.account_being_created,
-                              StorageErrorCode.resource_already_exists,
-                              StorageErrorCode.resource_type_mismatch,
-                              StorageErrorCode.blob_already_exists,
-                              StorageErrorCode.queue_already_exists,
-                              StorageErrorCode.container_already_exists,
-                              StorageErrorCode.container_being_deleted,
-                              StorageErrorCode.queue_being_deleted,
-                              StorageErrorCode.share_already_exists,
-                              StorageErrorCode.share_being_deleted]:
+            if error_code in [
+                StorageErrorCode.account_already_exists,
+                StorageErrorCode.account_being_created,
+                StorageErrorCode.resource_already_exists,
+                StorageErrorCode.resource_type_mismatch,
+                StorageErrorCode.blob_already_exists,
+                StorageErrorCode.queue_already_exists,
+                StorageErrorCode.container_already_exists,
+                StorageErrorCode.container_being_deleted,
+                StorageErrorCode.queue_being_deleted,
+                StorageErrorCode.share_already_exists,
+                StorageErrorCode.share_being_deleted,
+            ]:
                 raise_error = ResourceExistsError
     except ValueError:
         # Got an unknown error code
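Note: because process_storage_error re-raises service failures as typed azure-core exceptions, callers catch them by class instead of inspecting x-ms-error-code. A usage sketch (the container client is assumed to exist):

    from azure.core.exceptions import ResourceExistsError

    async def ensure_container(container_client):
        try:
            await container_client.create_container()
        except ResourceExistsError:
            pass  # e.g. ContainerAlreadyExists maps to ResourceExistsError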
@@ -183,7 +187,7 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
     error.args = (error.message,)
     try:
         # `from None` prevents us from double printing the exception (suppresses generated layer error context)
-        exec("raise error from None") # pylint: disable=exec-used # nosec
+        exec("raise error from None")  # pylint: disable=exec-used # nosec
     except SyntaxError as exc:
         raise error from exc