azure-storage-blob 12.25.1__py3-none-any.whl → 12.27.0b1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (67)
  1. azure/storage/blob/__init__.py +3 -2
  2. azure/storage/blob/_blob_client.py +94 -41
  3. azure/storage/blob/_blob_client_helpers.py +19 -4
  4. azure/storage/blob/_blob_service_client.py +16 -13
  5. azure/storage/blob/_container_client.py +25 -22
  6. azure/storage/blob/_deserialize.py +1 -1
  7. azure/storage/blob/_download.py +7 -7
  8. azure/storage/blob/_encryption.py +177 -184
  9. azure/storage/blob/_generated/_azure_blob_storage.py +3 -2
  10. azure/storage/blob/_generated/_configuration.py +2 -2
  11. azure/storage/blob/_generated/_utils/__init__.py +6 -0
  12. azure/storage/blob/_generated/{_serialization.py → _utils/serialization.py} +7 -25
  13. azure/storage/blob/_generated/aio/_azure_blob_storage.py +3 -2
  14. azure/storage/blob/_generated/aio/_configuration.py +2 -2
  15. azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +11 -14
  16. azure/storage/blob/_generated/aio/operations/_blob_operations.py +40 -64
  17. azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +18 -20
  18. azure/storage/blob/_generated/aio/operations/_container_operations.py +21 -43
  19. azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +18 -27
  20. azure/storage/blob/_generated/aio/operations/_service_operations.py +11 -22
  21. azure/storage/blob/_generated/models/__init__.py +2 -0
  22. azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +6 -0
  23. azure/storage/blob/_generated/models/_models_py3.py +30 -9
  24. azure/storage/blob/_generated/operations/_append_blob_operations.py +19 -20
  25. azure/storage/blob/_generated/operations/_blob_operations.py +68 -89
  26. azure/storage/blob/_generated/operations/_block_blob_operations.py +31 -27
  27. azure/storage/blob/_generated/operations/_container_operations.py +40 -62
  28. azure/storage/blob/_generated/operations/_page_blob_operations.py +31 -37
  29. azure/storage/blob/_generated/operations/_service_operations.py +20 -32
  30. azure/storage/blob/_lease.py +1 -0
  31. azure/storage/blob/_list_blobs_helper.py +1 -1
  32. azure/storage/blob/_quick_query_helper.py +20 -24
  33. azure/storage/blob/_serialize.py +2 -0
  34. azure/storage/blob/_shared/__init__.py +7 -7
  35. azure/storage/blob/_shared/authentication.py +49 -32
  36. azure/storage/blob/_shared/avro/avro_io.py +44 -42
  37. azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
  38. azure/storage/blob/_shared/avro/datafile.py +24 -21
  39. azure/storage/blob/_shared/avro/datafile_async.py +15 -15
  40. azure/storage/blob/_shared/avro/schema.py +196 -217
  41. azure/storage/blob/_shared/base_client.py +82 -59
  42. azure/storage/blob/_shared/base_client_async.py +58 -51
  43. azure/storage/blob/_shared/constants.py +1 -1
  44. azure/storage/blob/_shared/models.py +94 -92
  45. azure/storage/blob/_shared/parser.py +3 -3
  46. azure/storage/blob/_shared/policies.py +186 -147
  47. azure/storage/blob/_shared/policies_async.py +53 -65
  48. azure/storage/blob/_shared/request_handlers.py +50 -45
  49. azure/storage/blob/_shared/response_handlers.py +54 -45
  50. azure/storage/blob/_shared/shared_access_signature.py +67 -71
  51. azure/storage/blob/_shared/uploads.py +56 -49
  52. azure/storage/blob/_shared/uploads_async.py +70 -58
  53. azure/storage/blob/_shared_access_signature.py +3 -1
  54. azure/storage/blob/_version.py +1 -1
  55. azure/storage/blob/aio/__init__.py +3 -2
  56. azure/storage/blob/aio/_blob_client_async.py +241 -44
  57. azure/storage/blob/aio/_blob_service_client_async.py +13 -11
  58. azure/storage/blob/aio/_container_client_async.py +28 -25
  59. azure/storage/blob/aio/_download_async.py +7 -7
  60. azure/storage/blob/aio/_lease_async.py +1 -0
  61. azure/storage/blob/aio/_quick_query_helper_async.py +194 -0
  62. {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/METADATA +4 -5
  63. azure_storage_blob-12.27.0b1.dist-info/RECORD +86 -0
  64. azure_storage_blob-12.25.1.dist-info/RECORD +0 -84
  65. {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/LICENSE +0 -0
  66. {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/WHEEL +0 -0
  67. {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.27.0b1.dist-info}/top_level.txt +0 -0
azure/storage/blob/_shared/policies_async.py

@@ -21,7 +21,7 @@ if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential
     from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
         PipelineRequest,
-        PipelineResponse
+        PipelineResponse,
     )

@@ -29,29 +29,24 @@ _LOGGER = logging.getLogger(__name__)
 
 
 async def retry_hook(settings, **kwargs):
-    if settings['hook']:
-        if asyncio.iscoroutine(settings['hook']):
-            await settings['hook'](
-                retry_count=settings['count'] - 1,
-                location_mode=settings['mode'],
-                **kwargs)
+    if settings["hook"]:
+        if asyncio.iscoroutine(settings["hook"]):
+            await settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)
         else:
-            settings['hook'](
-                retry_count=settings['count'] - 1,
-                location_mode=settings['mode'],
-                **kwargs)
+            settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)
 
 
 async def is_checksum_retry(response):
     # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
+    if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
         try:
             await response.http_response.load_body()  # Load the body in memory and close the socket
         except (StreamClosedError, StreamConsumedError):
             pass
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+        computed_md5 = response.http_request.headers.get("content-md5", None) or encode_base64(
+            StorageContentValidation.get_content_md5(response.http_response.body())
+        )
+        if response.http_response.headers["content-md5"] != computed_md5:
             return True
     return False
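
The reflow above is behavior-preserving: with validate_content enabled, is_checksum_retry re-hashes the downloaded body and triggers a retry when it does not match the service's content-md5 header. A minimal standalone sketch of that comparison (the hashing mirrors what encode_base64 and StorageContentValidation.get_content_md5 do; the header value here is fabricated for illustration):

    import base64
    import hashlib

    def md5_matches(body: bytes, content_md5_header: str) -> bool:
        # Hash the body the same way the policy does, then compare against the
        # base64-encoded MD5 the service returned in the 'content-md5' header.
        computed = base64.b64encode(hashlib.md5(body).digest()).decode("utf-8")
        return computed == content_md5_header

    header = base64.b64encode(hashlib.md5(b"payload").digest()).decode("utf-8")
    assert md5_matches(b"payload", header)        # intact body: no retry
    assert not md5_matches(b"corrupted", header)  # mismatch: retry the request
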
@@ -59,54 +54,56 @@ async def is_checksum_retry(response):
 class AsyncStorageResponseHook(AsyncHTTPPolicy):
 
     def __init__(self, **kwargs):
-        self._response_callback = kwargs.get('raw_response_hook')
+        self._response_callback = kwargs.get("raw_response_hook")
         super(AsyncStorageResponseHook, self).__init__()
 
     async def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
-        data_stream_total = request.context.get('data_stream_total')
+        data_stream_total = request.context.get("data_stream_total")
         if data_stream_total is None:
-            data_stream_total = request.context.options.pop('data_stream_total', None)
-        download_stream_current = request.context.get('download_stream_current')
+            data_stream_total = request.context.options.pop("data_stream_total", None)
+        download_stream_current = request.context.get("download_stream_current")
         if download_stream_current is None:
-            download_stream_current = request.context.options.pop('download_stream_current', None)
-        upload_stream_current = request.context.get('upload_stream_current')
+            download_stream_current = request.context.options.pop("download_stream_current", None)
+        upload_stream_current = request.context.get("upload_stream_current")
         if upload_stream_current is None:
-            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+            upload_stream_current = request.context.options.pop("upload_stream_current", None)
 
-        response_callback = request.context.get('response_callback') or \
-            request.context.options.pop('raw_response_hook', self._response_callback)
+        response_callback = request.context.get("response_callback") or request.context.options.pop(
+            "raw_response_hook", self._response_callback
+        )
 
         response = await self.next.send(request)
-        will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response)
+        will_retry = is_retry(response, request.context.options.get("mode")) or await is_checksum_retry(response)
 
         # Auth error could come from Bearer challenge, in which case this request will be made again
         is_auth_error = response.http_response.status_code == 401
         should_update_counts = not (will_retry or is_auth_error)
 
         if should_update_counts and download_stream_current is not None:
-            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            download_stream_current += int(response.http_response.headers.get("Content-Length", 0))
             if data_stream_total is None:
-                content_range = response.http_response.headers.get('Content-Range')
+                content_range = response.http_response.headers.get("Content-Range")
                 if content_range:
-                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                    data_stream_total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
                 else:
                     data_stream_total = download_stream_current
         elif should_update_counts and upload_stream_current is not None:
-            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+            upload_stream_current += int(response.http_request.headers.get("Content-Length", 0))
         for pipeline_obj in [request, response]:
-            if hasattr(pipeline_obj, 'context'):
-                pipeline_obj.context['data_stream_total'] = data_stream_total
-                pipeline_obj.context['download_stream_current'] = download_stream_current
-                pipeline_obj.context['upload_stream_current'] = upload_stream_current
+            if hasattr(pipeline_obj, "context"):
+                pipeline_obj.context["data_stream_total"] = data_stream_total
+                pipeline_obj.context["download_stream_current"] = download_stream_current
+                pipeline_obj.context["upload_stream_current"] = upload_stream_current
         if response_callback:
             if asyncio.iscoroutine(response_callback):
-                await response_callback(response) # type: ignore
+                await response_callback(response)  # type: ignore
             else:
                 response_callback(response)
-            request.context['response_callback'] = response_callback
+            request.context["response_callback"] = response_callback
         return response
 
+
 class AsyncStorageRetryPolicy(StorageRetryPolicy):
     """
     The base class for Exponential and Linear retries containing shared code.
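
AsyncStorageResponseHook is the policy that maintains the transfer counters (data_stream_total, download_stream_current, upload_stream_current) in the pipeline context and invokes the user-supplied raw_response_hook. A hedged usage sketch (blob_client is a placeholder for an existing aio BlobClient; raw_response_hook is the standard keyword these operations accept):

    # Sketch: observe download progress via the counters this policy updates.
    async def on_response(response):
        current = response.context.get("download_stream_current")
        total = response.context.get("data_stream_total")
        if current is not None and total:
            print(f"downloaded {current} of {total} bytes")

    # downloader = await blob_client.download_blob(raw_response_hook=on_response)
    # data = await downloader.readall()
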
@@ -125,37 +122,29 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
         while retries_remaining:
             try:
                 response = await self.next.send(request)
-                if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response):
+                if is_retry(response, retry_settings["mode"]) or await is_checksum_retry(response):
                     retries_remaining = self.increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response)
+                        retry_settings, request=request.http_request, response=response.http_response
+                    )
                     if retries_remaining:
                         await retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None)
+                            retry_settings, request=request.http_request, response=response.http_response, error=None
+                        )
                         await self.sleep(retry_settings, request.context.transport)
                         continue
                 break
             except AzureError as err:
                 if isinstance(err, AzureSigningError):
                     raise
-                retries_remaining = self.increment(
-                    retry_settings, request=request.http_request, error=err)
+                retries_remaining = self.increment(retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
-                    await retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err)
+                    await retry_hook(retry_settings, request=request.http_request, response=None, error=err)
                     await self.sleep(retry_settings, request.context.transport)
                     continue
                 raise err
-        if retry_settings['history']:
-            response.context['history'] = retry_settings['history']
-        response.http_response.location_mode = retry_settings['mode']
+        if retry_settings["history"]:
+            response.context["history"] = retry_settings["history"]
+        response.http_response.location_mode = retry_settings["mode"]
         return response
 
@@ -176,7 +165,8 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         increment_base: int = 3,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
-        random_jitter_range: int = 3, **kwargs
+        random_jitter_range: int = 3,
+        **kwargs
     ) -> None:
         """
         Constructs an Exponential retry object. The initial_backoff is used for
@@ -203,21 +193,20 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(ExponentialRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
 
     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.
 
-        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: int or None
         """
         random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        backoff = self.initial_backoff + (0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]))
         random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
         random_range_end = backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)
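
The backoff arithmetic survives the reflow unchanged: nothing is added on the first attempt, then increment_base ** count is added to initial_backoff, and the result is jittered by random_jitter_range. The same computation as a standalone sketch (initial_backoff=15 assumes the documented default):

    import random

    def backoff_time(count: int, initial_backoff: int = 15, increment_base: int = 3, jitter: int = 3) -> float:
        # Same shape as ExponentialRetry.get_backoff_time above.
        backoff = initial_backoff + (0 if count == 0 else pow(increment_base, count))
        start = backoff - jitter if backoff > jitter else 0
        return random.Random().uniform(start, backoff + jitter)

    # count=0 -> ~15s, count=1 -> ~18s, count=2 -> ~24s, count=3 -> ~42s (each +/- 3s of jitter)
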
@@ -232,7 +221,8 @@ class LinearRetry(AsyncStorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""
 
     def __init__(
-        self, backoff: int = 15,
+        self,
+        backoff: int = 15,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
@@ -255,14 +245,13 @@ class LinearRetry(AsyncStorageRetryPolicy):
         """
         self.backoff = backoff
         self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(LinearRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
 
     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.
 
-        :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time.
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
@@ -271,14 +260,13 @@ class LinearRetry(AsyncStorageRetryPolicy):
         random_generator = random.Random()
         # the backoff interval normally does not change, however there is the possibility
         # that it was modified by accessing the property directly after initializing the object
-        random_range_start = self.backoff - self.random_jitter_range \
-            if self.backoff > self.random_jitter_range else 0
+        random_range_start = self.backoff - self.random_jitter_range if self.backoff > self.random_jitter_range else 0
         random_range_end = self.backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)
 
 
 class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
-    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+    """Custom Bearer token credential policy for following Storage Bearer challenges"""
 
     def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None:
         super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)
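
Neither retry class is usually instantiated mid-pipeline; they are passed in at client construction. A hedged wiring sketch (assuming the aio package re-exports ExponentialRetry the way the sync package does, and that a real connection string replaces the placeholder):

    from azure.storage.blob.aio import BlobServiceClient, ExponentialRetry  # re-export assumed

    # Tighter retries than the defaults; requests still flow through the
    # AsyncStorageRetryPolicy.send() loop shown in the hunks above.
    retry = ExponentialRetry(initial_backoff=5, increment_base=2, retry_total=4)
    service = BlobServiceClient.from_connection_string("<connection-string>", retry_policy=retry)
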
azure/storage/blob/_shared/request_handlers.py

@@ -6,7 +6,7 @@
 
 import logging
 import stat
-from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
+from io import SEEK_END, SEEK_SET, UnsupportedOperation
 from os import fstat
 from typing import Dict, Optional
 
@@ -37,12 +37,13 @@ def serialize_iso(attr):
             raise OverflowError("Hit max or min date")
 
         date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}"
-        return date + 'Z'
+        return date + "Z"
     except (ValueError, OverflowError) as err:
         raise ValueError("Unable to serialize datetime object.") from err
     except AttributeError as err:
         raise TypeError("ISO-8601 object must be valid datetime object.") from err
 
+
 def get_length(data):
     length = None
     # Check if object implements the __len__ method, covers most input cases such as bytearray.
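
serialize_iso emits second-precision UTC timestamps with a literal 'Z' suffix; only the quote style and the blank line before get_length change here. For reference, an equivalent rendering built from datetime:

    from datetime import datetime, timezone

    # Same output shape as serialize_iso: second precision, trailing 'Z'.
    moment = datetime(2025, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
    print(moment.strftime("%Y-%m-%dT%H:%M:%S") + "Z")  # 2025-01-02T03:04:05Z
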
@@ -62,7 +63,7 @@ def get_length(data):
             try:
                 mode = fstat(fileno).st_mode
                 if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
-                    #st_size only meaningful if regular file or symlink, other types
+                    # st_size only meaningful if regular file or symlink, other types
                     # e.g. sockets may return misleading sizes like 0
                     return fstat(fileno).st_size
             except OSError:
@@ -84,13 +85,13 @@ def get_length(data):
 
 def read_length(data):
     try:
-        if hasattr(data, 'read'):
-            read_data = b''
+        if hasattr(data, "read"):
+            read_data = b""
             for chunk in iter(lambda: data.read(4096), b""):
                 read_data += chunk
             return len(read_data), read_data
-        if hasattr(data, '__iter__'):
-            read_data = b''
+        if hasattr(data, "__iter__"):
+            read_data = b""
             for chunk in data:
                 read_data += chunk
             return len(read_data), read_data
@@ -100,8 +101,13 @@
 
 
 def validate_and_format_range_headers(
-        start_range, end_range, start_range_required=True,
-        end_range_required=True, check_content_md5=False, align_to_page=False):
+    start_range,
+    end_range,
+    start_range_required=True,
+    end_range_required=True,
+    check_content_md5=False,
+    align_to_page=False,
+):
     # If end range is provided, start range must be provided
     if (start_range_required or end_range is not None) and start_range is None:
         raise ValueError("start_range value cannot be None.")
@@ -111,16 +117,18 @@ def validate_and_format_range_headers(
     # Page ranges must be 512 aligned
     if align_to_page:
         if start_range is not None and start_range % 512 != 0:
-            raise ValueError(f"Invalid page blob start_range: {start_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
+            raise ValueError(
+                f"Invalid page blob start_range: {start_range}. " "The size must be aligned to a 512-byte boundary."
+            )
         if end_range is not None and end_range % 512 != 511:
-            raise ValueError(f"Invalid page blob end_range: {end_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
+            raise ValueError(
+                f"Invalid page blob end_range: {end_range}. " "The size must be aligned to a 512-byte boundary."
+            )
 
     # Format based on whether end_range is present
     range_header = None
     if end_range is not None:
-        range_header = f'bytes={start_range}-{end_range}'
+        range_header = f"bytes={start_range}-{end_range}"
     elif start_range is not None:
         range_header = f"bytes={start_range}-"
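
The constraints encoded above are unchanged: a bounded range renders as bytes=start-end, an open-ended one as bytes=start-, and page-blob ranges must cover whole 512-byte pages (start % 512 == 0, end % 512 == 511, since offsets are inclusive). For example, the first page of a page blob:

    # One 512-byte page spans bytes 0-511 inclusive.
    start, end = 0, 511
    assert start % 512 == 0 and end % 512 == 511
    print(f"bytes={start}-{end}")  # bytes=0-511
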
@@ -131,7 +139,7 @@
             raise ValueError("Both start and end range required for MD5 content validation.")
         if end_range - start_range > 4 * 1024 * 1024:
             raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
-        range_validation = 'true'
+        range_validation = "true"
 
     return range_header, range_validation
 
@@ -140,7 +148,7 @@ def add_metadata_headers(metadata: Optional[Dict[str, str]] = None) -> Dict[str, str]:
     headers = {}
     if metadata:
         for key, value in metadata.items():
-            headers[f'x-ms-meta-{key.strip()}'] = value.strip() if value else value
+            headers[f"x-ms-meta-{key.strip()}"] = value.strip() if value else value
     return headers
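
add_metadata_headers maps each user metadata key onto an x-ms-meta-* request header and strips stray whitespace. The same transformation, worked through on sample input:

    # Mirrors add_metadata_headers for an illustrative metadata dict.
    metadata = {" project ": "demo", "owner": " storage-team "}
    headers = {f"x-ms-meta-{k.strip()}": v.strip() if v else v for k, v in metadata.items()}
    print(headers)  # {'x-ms-meta-project': 'demo', 'x-ms-meta-owner': 'storage-team'}
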
@@ -158,29 +166,26 @@ def serialize_batch_body(requests, batch_id):
         a list of sub-request for the batch request
     :param str batch_id:
         to be embedded in batch sub-request delimiter
-    :returns: The body bytes for this batch.
+    :return: The body bytes for this batch.
     :rtype: bytes
     """
 
     if requests is None or len(requests) == 0:
-        raise ValueError('Please provide sub-request(s) for this batch request')
+        raise ValueError("Please provide sub-request(s) for this batch request")
 
-    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8')
-    newline_bytes = _HTTP_LINE_ENDING.encode('utf-8')
+    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode("utf-8")
+    newline_bytes = _HTTP_LINE_ENDING.encode("utf-8")
     batch_body = []
 
     content_index = 0
     for request in requests:
-        request.headers.update({
-            "Content-ID": str(content_index),
-            "Content-Length": str(0)
-        })
+        request.headers.update({"Content-ID": str(content_index), "Content-Length": str(0)})
         batch_body.append(delimiter_bytes)
         batch_body.append(_make_body_from_sub_request(request))
         batch_body.append(newline_bytes)
         content_index += 1
 
-    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8'))
+    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode("utf-8"))
     # final line of body MUST have \r\n at the end, or it will not be properly read by the service
     batch_body.append(newline_bytes)
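
The framing serialize_batch_body produces is standard multipart batch framing: every sub-request sits behind an opening --batch_<id> delimiter and the body closes with --batch_<id>-- followed by a mandatory CRLF. Schematically, for a single delete sub-request (delimiter and path are illustrative; real sub-request bodies come from _make_body_from_sub_request):

    delimiter = "batch_6f1f3ed5-ab85-4080"  # hypothetical batch id
    body = (
        f"--{delimiter}\r\n"
        "Content-Type: application/http\r\n"
        "Content-ID: 0\r\n"
        "Content-Length: 0\r\n"
        "\r\n"
        "DELETE /container/blob1 HTTP/1.1\r\n"
        "\r\n"
        f"--{delimiter}--\r\n"  # closing delimiter; the final CRLF is required
    ).encode("utf-8")
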
@@ -197,35 +202,35 @@ def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False):
         Whether to include the starting dashes. Used in the body, but non on defining the delimiter.
     :param bool is_append_dashes:
         Whether to include the ending dashes. Used in the body on the closing delimiter only.
-    :returns: The delimiter, WITHOUT a trailing newline.
+    :return: The delimiter, WITHOUT a trailing newline.
     :rtype: str
     """
 
-    prepend_dashes = '--' if is_prepend_dashes else ''
-    append_dashes = '--' if is_append_dashes else ''
+    prepend_dashes = "--" if is_prepend_dashes else ""
+    append_dashes = "--" if is_append_dashes else ""
 
     return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes
 
 
 def _make_body_from_sub_request(sub_request):
     """
-     Content-Type: application/http
-     Content-ID: <sequential int ID>
-     Content-Transfer-Encoding: <value> (if present)
+    Content-Type: application/http
+    Content-ID: <sequential int ID>
+    Content-Transfer-Encoding: <value> (if present)
 
-     <verb> <path><query> HTTP/<version>
-     <header key>: <header value> (repeated as necessary)
-     Content-Length: <value>
-     (newline if content length > 0)
-     <body> (if content length > 0)
+    <verb> <path><query> HTTP/<version>
+    <header key>: <header value> (repeated as necessary)
+    Content-Length: <value>
+    (newline if content length > 0)
+    <body> (if content length > 0)
 
-     Serializes an http request.
+    Serializes an http request.
 
-     :param ~azure.core.pipeline.transport.HttpRequest sub_request:
-        Request to serialize.
-     :returns: The serialized sub-request in bytes
-     :rtype: bytes
-     """
+    :param ~azure.core.pipeline.transport.HttpRequest sub_request:
+        Request to serialize.
+    :return: The serialized sub-request in bytes
+    :rtype: bytes
+    """
 
     # put the sub-request's headers into a list for efficient str concatenation
     sub_request_body = []
@@ -249,9 +254,9 @@ def _make_body_from_sub_request(sub_request):
 
     # append HTTP verb and path and query and HTTP version
     sub_request_body.append(sub_request.method)
-    sub_request_body.append(' ')
+    sub_request_body.append(" ")
     sub_request_body.append(sub_request.url)
-    sub_request_body.append(' ')
+    sub_request_body.append(" ")
     sub_request_body.append(_HTTP1_1_IDENTIFIER)
     sub_request_body.append(_HTTP_LINE_ENDING)
 
@@ -266,4 +271,4 @@ def _make_body_from_sub_request(sub_request):
     # append blank line
     sub_request_body.append(_HTTP_LINE_ENDING)
 
-    return ''.join(sub_request_body).encode()
+    return "".join(sub_request_body).encode()

azure/storage/blob/_shared/response_handlers.py

@@ -22,6 +22,7 @@ from .models import get_enum_value, StorageErrorCode, UserDelegationKey
 from .parser import _to_utc_datetime
 
 
+SV_DOCS_URL = "https://learn.microsoft.com/rest/api/storageservices/versioning-for-the-azure-storage-services"
 _LOGGER = logging.getLogger(__name__)
 
 
@@ -46,23 +47,23 @@ def parse_length_from_content_range(content_range):
     # First, split in space and take the second half: '1-3/65537'
     # Next, split on slash and take the second half: '65537'
     # Finally, convert to an int: 65537
-    return int(content_range.split(' ', 1)[1].split('/', 1)[1])
+    return int(content_range.split(" ", 1)[1].split("/", 1)[1])
 
 
 def normalize_headers(headers):
     normalized = {}
     for key, value in headers.items():
-        if key.startswith('x-ms-'):
+        if key.startswith("x-ms-"):
             key = key[5:]
-        normalized[key.lower().replace('-', '_')] = get_enum_value(value)
+        normalized[key.lower().replace("-", "_")] = get_enum_value(value)
     return normalized
 
 
 def deserialize_metadata(response, obj, headers):  # pylint: disable=unused-argument
     try:
-        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith('x-ms-meta-')}
+        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith("x-ms-meta-")}
     except AttributeError:
-        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith('x-ms-meta-')}
+        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith("x-ms-meta-")}
     return {k[10:]: v for k, v in raw_metadata.items()}
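
parse_length_from_content_range keeps its two-split parse: take everything after the unit, then everything after the slash. Worked through for a typical header value:

    # 'bytes 0-3/65537' -> split(' ')[1] = '0-3/65537' -> split('/')[1] = '65537'
    content_range = "bytes 0-3/65537"
    total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
    print(total)  # 65537
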
@@ -82,19 +83,23 @@ def return_raw_deserialized(response, *_):
     return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME]
 
 
-def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
+def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
     raise_error = HttpResponseError
     serialized = False
     if isinstance(storage_error, AzureSigningError):
-        storage_error.message = storage_error.message + \
-            '. This is likely due to an invalid shared key. Please check your shared key and try again.'
+        storage_error.message = (
+            storage_error.message
+            + ". This is likely due to an invalid shared key. Please check your shared key and try again."
+        )
     if not storage_error.response or storage_error.response.status_code in [200, 204]:
         raise storage_error
     # If it is one of those three then it has been serialized prior by the generated layer.
-    if isinstance(storage_error, (PartialBatchErrorException,
-                                  ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)):
+    if isinstance(
+        storage_error,
+        (PartialBatchErrorException, ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError),
+    ):
         serialized = True
-    error_code = storage_error.response.headers.get('x-ms-error-code')
+    error_code = storage_error.response.headers.get("x-ms-error-code")
     error_message = storage_error.message
     additional_data = {}
     error_dict = {}
@@ -104,27 +109,25 @@ def process_storage_error(storage_error) -> NoReturn:
             if error_body is None or len(error_body) == 0:
                 error_body = storage_error.response.reason
         except AttributeError:
-            error_body = ''
+            error_body = ""
         # If it is an XML response
         if isinstance(error_body, Element):
-            error_dict = {
-                child.tag.lower(): child.text
-                for child in error_body
-            }
+            error_dict = {child.tag.lower(): child.text for child in error_body}
         # If it is a JSON response
         elif isinstance(error_body, dict):
-            error_dict = error_body.get('error', {})
+            error_dict = error_body.get("error", {})
         elif not error_code:
             _LOGGER.warning(
-                'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body))
-            error_dict = {'message': str(error_body)}
+                "Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.", type(error_body)
+            )
+            error_dict = {"message": str(error_body)}
 
         # If we extracted from a Json or XML response
         # There is a chance error_dict is just a string
         if error_dict and isinstance(error_dict, dict):
-            error_code = error_dict.get('code')
-            error_message = error_dict.get('message')
-            additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}}
+            error_code = error_dict.get("code")
+            error_message = error_dict.get("message")
+            additional_data = {k: v for k, v in error_dict.items() if k not in {"code", "message"}}
     except DecodeError:
         pass
 
@@ -132,31 +135,33 @@ def process_storage_error(storage_error) -> NoReturn:
         # This check would be unnecessary if we have already serialized the error
         if error_code and not serialized:
             error_code = StorageErrorCode(error_code)
-            if error_code in [StorageErrorCode.condition_not_met,
-                              StorageErrorCode.blob_overwritten]:
+            if error_code in [StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten]:
                 raise_error = ResourceModifiedError
-            if error_code in [StorageErrorCode.invalid_authentication_info,
-                              StorageErrorCode.authentication_failed]:
+            if error_code in [StorageErrorCode.invalid_authentication_info, StorageErrorCode.authentication_failed]:
                 raise_error = ClientAuthenticationError
-            if error_code in [StorageErrorCode.resource_not_found,
-                              StorageErrorCode.cannot_verify_copy_source,
-                              StorageErrorCode.blob_not_found,
-                              StorageErrorCode.queue_not_found,
-                              StorageErrorCode.container_not_found,
-                              StorageErrorCode.parent_not_found,
-                              StorageErrorCode.share_not_found]:
+            if error_code in [
+                StorageErrorCode.resource_not_found,
+                StorageErrorCode.cannot_verify_copy_source,
+                StorageErrorCode.blob_not_found,
+                StorageErrorCode.queue_not_found,
+                StorageErrorCode.container_not_found,
+                StorageErrorCode.parent_not_found,
+                StorageErrorCode.share_not_found,
+            ]:
                 raise_error = ResourceNotFoundError
-            if error_code in [StorageErrorCode.account_already_exists,
-                              StorageErrorCode.account_being_created,
-                              StorageErrorCode.resource_already_exists,
-                              StorageErrorCode.resource_type_mismatch,
-                              StorageErrorCode.blob_already_exists,
-                              StorageErrorCode.queue_already_exists,
-                              StorageErrorCode.container_already_exists,
-                              StorageErrorCode.container_being_deleted,
-                              StorageErrorCode.queue_being_deleted,
-                              StorageErrorCode.share_already_exists,
-                              StorageErrorCode.share_being_deleted]:
+            if error_code in [
+                StorageErrorCode.account_already_exists,
+                StorageErrorCode.account_being_created,
+                StorageErrorCode.resource_already_exists,
+                StorageErrorCode.resource_type_mismatch,
+                StorageErrorCode.blob_already_exists,
+                StorageErrorCode.queue_already_exists,
+                StorageErrorCode.container_already_exists,
+                StorageErrorCode.container_being_deleted,
+                StorageErrorCode.queue_being_deleted,
+                StorageErrorCode.share_already_exists,
+                StorageErrorCode.share_being_deleted,
+            ]:
                 raise_error = ResourceExistsError
     except ValueError:
         # Got an unknown error code
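
The dispatch itself is untouched by the reformat: the service's x-ms-error-code string is parsed into StorageErrorCode and bucketed into the azure-core exception type to raise. A condensed sketch of the same mapping (classify is a hypothetical helper and the code sets are trimmed, not the full lists above):

    from azure.core.exceptions import (
        ClientAuthenticationError,
        HttpResponseError,
        ResourceExistsError,
        ResourceModifiedError,
        ResourceNotFoundError,
    )
    from azure.storage.blob import StorageErrorCode

    _BUCKETS = {
        ResourceModifiedError: {StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten},
        ClientAuthenticationError: {StorageErrorCode.authentication_failed},
        ResourceNotFoundError: {StorageErrorCode.blob_not_found, StorageErrorCode.container_not_found},
        ResourceExistsError: {StorageErrorCode.blob_already_exists, StorageErrorCode.container_already_exists},
    }

    def classify(error_code: str):
        code = StorageErrorCode(error_code)
        for exc_type, codes in _BUCKETS.items():
            if code in codes:
                return exc_type
        return HttpResponseError  # default when no bucket matches

    print(classify("BlobNotFound").__name__)  # ResourceNotFoundError
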
@@ -170,6 +175,10 @@ def process_storage_error(storage_error) -> NoReturn:
         for name, info in additional_data.items():
             error_message += f"\n{name}:{info}"
 
+    if additional_data.get("headername") == "x-ms-version" and error_code == StorageErrorCode.INVALID_HEADER_VALUE:
+        error_message = ("The provided service version is not enabled on this storage account." +
+                         f"Please see {SV_DOCS_URL} for additional information.\n" + error_message)
+
     # No need to create an instance if it has already been serialized by the generated layer
     if serialized:
         storage_error.message = error_message
@@ -183,7 +192,7 @@ def process_storage_error(storage_error) -> NoReturn:
         error.args = (error.message,)
     try:
         # `from None` prevents us from double printing the exception (suppresses generated layer error context)
-        exec("raise error from None") # pylint: disable=exec-used # nosec
+        exec("raise error from None")  # pylint: disable=exec-used # nosec
     except SyntaxError as exc:
         raise error from exc