azure-storage-blob 12.25.1__py3-none-any.whl → 12.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azure/storage/blob/__init__.py +3 -2
- azure/storage/blob/_blob_client.py +94 -41
- azure/storage/blob/_blob_client_helpers.py +19 -4
- azure/storage/blob/_blob_service_client.py +16 -13
- azure/storage/blob/_container_client.py +25 -22
- azure/storage/blob/_deserialize.py +1 -1
- azure/storage/blob/_download.py +7 -7
- azure/storage/blob/_encryption.py +177 -184
- azure/storage/blob/_generated/_azure_blob_storage.py +1 -1
- azure/storage/blob/_generated/_configuration.py +2 -2
- azure/storage/blob/_generated/_serialization.py +3 -3
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +1 -1
- azure/storage/blob/_generated/aio/_configuration.py +2 -2
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +5 -4
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +5 -25
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -7
- azure/storage/blob/_generated/aio/operations/_container_operations.py +1 -19
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +5 -10
- azure/storage/blob/_generated/aio/operations/_service_operations.py +1 -8
- azure/storage/blob/_generated/models/__init__.py +2 -0
- azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +6 -0
- azure/storage/blob/_generated/operations/_append_blob_operations.py +12 -9
- azure/storage/blob/_generated/operations/_blob_operations.py +32 -49
- azure/storage/blob/_generated/operations/_block_blob_operations.py +21 -13
- azure/storage/blob/_generated/operations/_container_operations.py +19 -37
- azure/storage/blob/_generated/operations/_page_blob_operations.py +17 -19
- azure/storage/blob/_generated/operations/_service_operations.py +9 -17
- azure/storage/blob/_lease.py +1 -0
- azure/storage/blob/_quick_query_helper.py +20 -24
- azure/storage/blob/_serialize.py +1 -0
- azure/storage/blob/_shared/__init__.py +7 -7
- azure/storage/blob/_shared/authentication.py +49 -32
- azure/storage/blob/_shared/avro/avro_io.py +44 -42
- azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
- azure/storage/blob/_shared/avro/datafile.py +24 -21
- azure/storage/blob/_shared/avro/datafile_async.py +15 -15
- azure/storage/blob/_shared/avro/schema.py +196 -217
- azure/storage/blob/_shared/base_client.py +82 -59
- azure/storage/blob/_shared/base_client_async.py +58 -51
- azure/storage/blob/_shared/constants.py +1 -1
- azure/storage/blob/_shared/models.py +93 -92
- azure/storage/blob/_shared/parser.py +3 -3
- azure/storage/blob/_shared/policies.py +176 -145
- azure/storage/blob/_shared/policies_async.py +58 -69
- azure/storage/blob/_shared/request_handlers.py +50 -45
- azure/storage/blob/_shared/response_handlers.py +49 -45
- azure/storage/blob/_shared/shared_access_signature.py +67 -71
- azure/storage/blob/_shared/uploads.py +56 -49
- azure/storage/blob/_shared/uploads_async.py +70 -58
- azure/storage/blob/_shared_access_signature.py +3 -1
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +3 -2
- azure/storage/blob/aio/_blob_client_async.py +241 -44
- azure/storage/blob/aio/_blob_service_client_async.py +13 -11
- azure/storage/blob/aio/_container_client_async.py +28 -25
- azure/storage/blob/aio/_download_async.py +15 -11
- azure/storage/blob/aio/_lease_async.py +1 -0
- azure/storage/blob/aio/_quick_query_helper_async.py +194 -0
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.26.0.dist-info}/METADATA +1 -1
- azure_storage_blob-12.26.0.dist-info/RECORD +85 -0
- azure_storage_blob-12.25.1.dist-info/RECORD +0 -84
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.26.0.dist-info}/LICENSE +0 -0
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.26.0.dist-info}/WHEEL +0 -0
- {azure_storage_blob-12.25.1.dist-info → azure_storage_blob-12.26.0.dist-info}/top_level.txt +0 -0
azure/storage/blob/_shared/policies_async.py

@@ -21,7 +21,7 @@ if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential
     from azure.core.pipeline.transport import (  # pylint: disable=non-abstract-transport-import
         PipelineRequest,
-        PipelineResponse
+        PipelineResponse,
     )


@@ -29,29 +29,25 @@ _LOGGER = logging.getLogger(__name__)


 async def retry_hook(settings, **kwargs):
-    if settings['hook']:
-        if asyncio.iscoroutine(settings['hook']):
-            await settings['hook'](
-                retry_count=settings['count'] - 1,
-                location_mode=settings['mode'],
-                **kwargs)
+    if settings["hook"]:
+        if asyncio.iscoroutine(settings["hook"]):
+            await settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)
     else:
-        settings['hook'](
-            retry_count=settings['count'] - 1,
-            location_mode=settings['mode'],
-            **kwargs)
+        settings["hook"](retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs)


 async def is_checksum_retry(response):
     # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-        try:
-            await response.http_response.load_body()  # Load the body in memory and close the socket
-        except (StreamClosedError, StreamConsumedError):
-            pass
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+    if response.context.get("validate_content", False) and response.http_response.headers.get("content-md5"):
+        if hasattr(response.http_response, "load_body"):
+            try:
+                await response.http_response.load_body()  # Load the body in memory and close the socket
+            except (StreamClosedError, StreamConsumedError):
+                pass
+        computed_md5 = response.http_request.headers.get("content-md5", None) or encode_base64(
+            StorageContentValidation.get_content_md5(response.http_response.body())
+        )
+        if response.http_response.headers["content-md5"] != computed_md5:
             return True
     return False
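The checksum retry above re-derives an MD5 over the downloaded body and compares it to the service's content-md5 header, retrying on mismatch. A minimal standalone sketch of that comparison, using only the standard library (StorageContentValidation.get_content_md5 is, in essence, an MD5 digest):

    import base64
    import hashlib

    def body_matches_md5(body: bytes, content_md5_header: str) -> bool:
        # Content-MD5 carries the base64-encoded MD5 digest of the payload
        computed = base64.b64encode(hashlib.md5(body).digest()).decode("utf-8")
        return computed == content_md5_header

    assert body_matches_md5(b"abc", "kAFQmDzST7DWlj99KOF/cg==")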
@@ -59,54 +55,56 @@ async def is_checksum_retry(response):
 class AsyncStorageResponseHook(AsyncHTTPPolicy):

     def __init__(self, **kwargs):
-        self._response_callback = kwargs.get('raw_response_hook')
+        self._response_callback = kwargs.get("raw_response_hook")
         super(AsyncStorageResponseHook, self).__init__()

     async def send(self, request: "PipelineRequest") -> "PipelineResponse":
         # Values could be 0
-        data_stream_total = request.context.get('data_stream_total')
+        data_stream_total = request.context.get("data_stream_total")
         if data_stream_total is None:
-            data_stream_total = request.context.options.pop('data_stream_total', None)
-        download_stream_current = request.context.get('download_stream_current')
+            data_stream_total = request.context.options.pop("data_stream_total", None)
+        download_stream_current = request.context.get("download_stream_current")
         if download_stream_current is None:
-            download_stream_current = request.context.options.pop('download_stream_current', None)
-        upload_stream_current = request.context.get('upload_stream_current')
+            download_stream_current = request.context.options.pop("download_stream_current", None)
+        upload_stream_current = request.context.get("upload_stream_current")
         if upload_stream_current is None:
-            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+            upload_stream_current = request.context.options.pop("upload_stream_current", None)

-        response_callback = request.context.get('response_callback') or \
-            request.context.options.pop('raw_response_hook', self._response_callback)
+        response_callback = request.context.get("response_callback") or request.context.options.pop(
+            "raw_response_hook", self._response_callback
+        )

         response = await self.next.send(request)
-        will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response)
+        will_retry = is_retry(response, request.context.options.get("mode")) or await is_checksum_retry(response)

         # Auth error could come from Bearer challenge, in which case this request will be made again
         is_auth_error = response.http_response.status_code == 401
         should_update_counts = not (will_retry or is_auth_error)

         if should_update_counts and download_stream_current is not None:
-            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            download_stream_current += int(response.http_response.headers.get("Content-Length", 0))
             if data_stream_total is None:
-                content_range = response.http_response.headers.get('Content-Range')
+                content_range = response.http_response.headers.get("Content-Range")
                 if content_range:
-                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                    data_stream_total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
                 else:
                     data_stream_total = download_stream_current
         elif should_update_counts and upload_stream_current is not None:
-            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+            upload_stream_current += int(response.http_request.headers.get("Content-Length", 0))
         for pipeline_obj in [request, response]:
-            if hasattr(pipeline_obj, 'context'):
-                pipeline_obj.context['data_stream_total'] = data_stream_total
-                pipeline_obj.context['download_stream_current'] = download_stream_current
-                pipeline_obj.context['upload_stream_current'] = upload_stream_current
+            if hasattr(pipeline_obj, "context"):
+                pipeline_obj.context["data_stream_total"] = data_stream_total
+                pipeline_obj.context["download_stream_current"] = download_stream_current
+                pipeline_obj.context["upload_stream_current"] = upload_stream_current
         if response_callback:
             if asyncio.iscoroutine(response_callback):
-                await response_callback(response)
+                await response_callback(response)  # type: ignore
             else:
                 response_callback(response)
-            request.context['response_callback'] = response_callback
+            request.context["response_callback"] = response_callback
         return response

+
 class AsyncStorageRetryPolicy(StorageRetryPolicy):
     """
     The base class for Exponential and Linear retries containing shared code.
@@ -125,37 +123,29 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
         while retries_remaining:
             try:
                 response = await self.next.send(request)
-                if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response):
+                if is_retry(response, retry_settings["mode"]) or await is_checksum_retry(response):
                     retries_remaining = self.increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response)
+                        retry_settings, request=request.http_request, response=response.http_response
+                    )
                     if retries_remaining:
                         await retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None)
+                            retry_settings, request=request.http_request, response=response.http_response, error=None
+                        )
                         await self.sleep(retry_settings, request.context.transport)
                         continue
                 break
             except AzureError as err:
                 if isinstance(err, AzureSigningError):
                     raise
-                retries_remaining = self.increment(
-                    retry_settings, request=request.http_request, error=err)
+                retries_remaining = self.increment(retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
-                    await retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err)
+                    await retry_hook(retry_settings, request=request.http_request, response=None, error=err)
                     await self.sleep(retry_settings, request.context.transport)
                     continue
                 raise err
-        if retry_settings['history']:
-            response.context['history'] = retry_settings['history']
-        response.http_response.location_mode = retry_settings['mode']
+        if retry_settings["history"]:
+            response.context["history"] = retry_settings["history"]
+        response.http_response.location_mode = retry_settings["mode"]
         return response
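Because the response hook above publishes its counters into the pipeline context, a raw_response_hook callback can observe transfer progress. A hypothetical usage sketch (the blob URL is a placeholder; the callback receives the pipeline response object):

    from azure.storage.blob.aio import BlobClient

    def print_progress(response):
        # Counters populated by AsyncStorageResponseHook above
        current = response.context.get("download_stream_current")
        total = response.context.get("data_stream_total")
        if current is not None:
            print(f"downloaded {current} of {total} bytes")

    async def download(blob_url: str) -> bytes:
        async with BlobClient.from_blob_url(blob_url) as client:
            downloader = await client.download_blob(raw_response_hook=print_progress)
            return await downloader.readall()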
@@ -176,7 +166,8 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         increment_base: int = 3,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
-        random_jitter_range: int = 3, **kwargs
+        random_jitter_range: int = 3,
+        **kwargs
     ) -> None:
         """
         Constructs an Exponential retry object. The initial_backoff is used for
@@ -203,21 +194,20 @@ class ExponentialRetry(AsyncStorageRetryPolicy):
         self.initial_backoff = initial_backoff
         self.increment_base = increment_base
         self.random_jitter_range = random_jitter_range
-        super(ExponentialRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(ExponentialRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
         :rtype: int or None
         """
         random_generator = random.Random()
-        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        backoff = self.initial_backoff + (0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]))
         random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
         random_range_end = backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)
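For a concrete feel of the schedule this computes, here is a standalone sketch of the same backoff-plus-jitter formula, using the SDK defaults shown above (initial_backoff=15, increment_base=3, random_jitter_range=3):

    import random

    def exponential_backoff(count: int, initial_backoff: int = 15,
                            increment_base: int = 3, jitter: int = 3) -> float:
        # First retry waits ~initial_backoff; later retries add increment_base**count
        backoff = initial_backoff + (0 if count == 0 else pow(increment_base, count))
        start = backoff - jitter if backoff > jitter else 0
        return random.Random().uniform(start, backoff + jitter)

    # count=0 -> ~15s, count=1 -> ~18s, count=2 -> ~24s, count=3 -> ~42s (each +/- 3s)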
@@ -232,7 +222,8 @@ class LinearRetry(AsyncStorageRetryPolicy):
     """A number in seconds which indicates a range to jitter/randomize for the back-off interval."""

     def __init__(
-        self, backoff: int = 15,
+        self,
+        backoff: int = 15,
         retry_total: int = 3,
         retry_to_secondary: bool = False,
         random_jitter_range: int = 3,
@@ -255,14 +246,13 @@ class LinearRetry(AsyncStorageRetryPolicy):
         """
         self.backoff = backoff
         self.random_jitter_range = random_jitter_range
-        super(LinearRetry, self).__init__(
-            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+        super(LinearRetry, self).__init__(retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)

     def get_backoff_time(self, settings: Dict[str, Any]) -> float:
         """
         Calculates how long to sleep before retrying.

-        :param Dict[str, Any]
+        :param Dict[str, Any] settings: The configurable values pertaining to the backoff time.
         :return:
             An integer indicating how long to wait before retrying the request,
             or None to indicate no retry should be performed.
@@ -271,14 +261,13 @@ class LinearRetry(AsyncStorageRetryPolicy):
         random_generator = random.Random()
         # the backoff interval normally does not change, however there is the possibility
         # that it was modified by accessing the property directly after initializing the object
-        random_range_start = self.backoff - self.random_jitter_range \
-            if self.backoff > self.random_jitter_range else 0
+        random_range_start = self.backoff - self.random_jitter_range if self.backoff > self.random_jitter_range else 0
         random_range_end = self.backoff + self.random_jitter_range
         return random_generator.uniform(random_range_start, random_range_end)


 class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
-    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+    """Custom Bearer token credential policy for following Storage Bearer challenges"""

     def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None:
         super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)
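Both retry flavors are meant to be handed to the storage clients at construction time. A sketch of that wiring, assuming the retry_policy keyword accepted by the client constructors (the connection string is a placeholder):

    from azure.storage.blob.aio import BlobServiceClient, LinearRetry

    # Fixed ~10s pause between attempts instead of the default exponential schedule;
    # retry_to_secondary only helps RA-GRS accounts with a readable secondary.
    retry = LinearRetry(backoff=10, retry_total=5, retry_to_secondary=False)
    service = BlobServiceClient.from_connection_string(
        "<connection-string>",  # placeholder
        retry_policy=retry,
    )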
azure/storage/blob/_shared/request_handlers.py

@@ -6,7 +6,7 @@

 import logging
 import stat
-from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
+from io import SEEK_END, SEEK_SET, UnsupportedOperation
 from os import fstat
 from typing import Dict, Optional

@@ -37,12 +37,13 @@ def serialize_iso(attr):
             raise OverflowError("Hit max or min date")

         date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}"
-        return date + 'Z'
+        return date + "Z"
     except (ValueError, OverflowError) as err:
         raise ValueError("Unable to serialize datetime object.") from err
     except AttributeError as err:
         raise TypeError("ISO-8601 object must be valid datetime object.") from err

+
 def get_length(data):
     length = None
     # Check if object implements the __len__ method, covers most input cases such as bytearray.
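serialize_iso builds the timestamp field by field and appends a literal "Z" for UTC. The same format, illustrated with the standard library:

    from datetime import datetime, timezone

    # Same format string as serialize_iso above, with the literal "Z" suffix
    utc = datetime(2024, 5, 1, 12, 30, 15, tzinfo=timezone.utc).utctimetuple()
    stamp = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}Z"
    print(stamp)  # 2024-05-01T12:30:15Z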
@@ -62,7 +63,7 @@ def get_length(data):
         try:
             mode = fstat(fileno).st_mode
             if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
-                #st_size only meaningful if regular file or symlink, other types
+                # st_size only meaningful if regular file or symlink, other types
                 # e.g. sockets may return misleading sizes like 0
                 return fstat(fileno).st_size
         except OSError:
@@ -84,13 +85,13 @@ def get_length(data):

 def read_length(data):
     try:
-        if hasattr(data, 'read'):
-            read_data = b''
+        if hasattr(data, "read"):
+            read_data = b""
             for chunk in iter(lambda: data.read(4096), b""):
                 read_data += chunk
             return len(read_data), read_data
-        if hasattr(data, '__iter__'):
-            read_data = b''
+        if hasattr(data, "__iter__"):
+            read_data = b""
             for chunk in data:
                 read_data += chunk
             return len(read_data), read_data
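For seekable streams that lack __len__ and fstat support, get_length falls back to seek/tell probing. A minimal sketch of that idea (assuming the stream supports tell and seek):

    from io import SEEK_END, SEEK_SET, BytesIO

    def remaining_length(stream) -> int:
        # Measure from the current position to the end, then restore the position
        current = stream.tell()
        end = stream.seek(0, SEEK_END)
        stream.seek(current, SEEK_SET)
        return end - current

    print(remaining_length(BytesIO(b"hello world")))  # 11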
@@ -100,8 +101,13 @@ def read_length(data):


 def validate_and_format_range_headers(
-        start_range, end_range, start_range_required=True,
-        end_range_required=True, check_content_md5=False, align_to_page=False):
+    start_range,
+    end_range,
+    start_range_required=True,
+    end_range_required=True,
+    check_content_md5=False,
+    align_to_page=False,
+):
     # If end range is provided, start range must be provided
     if (start_range_required or end_range is not None) and start_range is None:
         raise ValueError("start_range value cannot be None.")
@@ -111,16 +117,18 @@ def validate_and_format_range_headers(
     # Page ranges must be 512 aligned
     if align_to_page:
         if start_range is not None and start_range % 512 != 0:
-            raise ValueError(f"Invalid page blob start_range: {start_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
+            raise ValueError(
+                f"Invalid page blob start_range: {start_range}. " "The size must be aligned to a 512-byte boundary."
+            )
         if end_range is not None and end_range % 512 != 511:
-            raise ValueError(f"Invalid page blob end_range: {end_range}. "
-                             "The size must be aligned to a 512-byte boundary.")
+            raise ValueError(
+                f"Invalid page blob end_range: {end_range}. " "The size must be aligned to a 512-byte boundary."
+            )

     # Format based on whether end_range is present
     range_header = None
     if end_range is not None:
-        range_header = f'bytes={start_range}-{end_range}'
+        range_header = f"bytes={start_range}-{end_range}"
     elif start_range is not None:
         range_header = f"bytes={start_range}-"

@@ -131,7 +139,7 @@ def validate_and_format_range_headers(
             raise ValueError("Both start and end range required for MD5 content validation.")
         if end_range - start_range > 4 * 1024 * 1024:
             raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
-        range_validation = 'true'
+        range_validation = "true"

     return range_header, range_validation

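The resulting header uses standard HTTP Range syntax (bytes=start-end), and page-blob operations additionally require the 512-byte alignment validated above. A compact sketch of both rules:

    def format_page_range(start: int, end: int) -> str:
        # Page blob ranges must start on a 512-byte boundary and end just before one
        if start % 512 != 0 or end % 512 != 511:
            raise ValueError("Page ranges must be aligned to 512-byte boundaries.")
        return f"bytes={start}-{end}"

    print(format_page_range(0, 1023))  # bytes=0-1023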
@@ -140,7 +148,7 @@ def add_metadata_headers(metadata: Optional[Dict[str, str]] = None) -> Dict[str, str]:
     headers = {}
     if metadata:
         for key, value in metadata.items():
-            headers[f'x-ms-meta-{key.strip()}'] = value.strip() if value else value
+            headers[f"x-ms-meta-{key.strip()}"] = value.strip() if value else value
     return headers


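On the wire, each metadata key becomes an x-ms-meta-* header; deserialize_metadata in response_handlers (further below) strips the prefix back off. For example:

    metadata = {"project": "demo", "owner": " alice "}
    headers = {f"x-ms-meta-{k.strip()}": v.strip() if v else v for k, v in metadata.items()}
    print(headers)  # {'x-ms-meta-project': 'demo', 'x-ms-meta-owner': 'alice'}

    # Reversing the prefix is how metadata comes back out of a response
    restored = {k[len("x-ms-meta-"):]: v for k, v in headers.items()}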
@@ -158,29 +166,26 @@ def serialize_batch_body(requests, batch_id):
         a list of sub-request for the batch request
     :param str batch_id:
         to be embedded in batch sub-request delimiter
-    :returns: The body bytes for this batch.
+    :return: The body bytes for this batch.
     :rtype: bytes
     """

     if requests is None or len(requests) == 0:
-        raise ValueError('Please provide sub-request(s) for this batch request')
+        raise ValueError("Please provide sub-request(s) for this batch request")

-    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8')
-    newline_bytes = _HTTP_LINE_ENDING.encode('utf-8')
+    delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode("utf-8")
+    newline_bytes = _HTTP_LINE_ENDING.encode("utf-8")
     batch_body = []

     content_index = 0
     for request in requests:
-        request.headers.update({
-            "Content-ID": str(content_index),
-            "Content-Length": str(0)
-        })
+        request.headers.update({"Content-ID": str(content_index), "Content-Length": str(0)})
         batch_body.append(delimiter_bytes)
         batch_body.append(_make_body_from_sub_request(request))
         batch_body.append(newline_bytes)
         content_index += 1

-    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8'))
+    batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode("utf-8"))
     # final line of body MUST have \r\n at the end, or it will not be properly read by the service
     batch_body.append(newline_bytes)

@@ -197,35 +202,35 @@ def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False):
         Whether to include the starting dashes. Used in the body, but non on defining the delimiter.
     :param bool is_append_dashes:
         Whether to include the ending dashes. Used in the body on the closing delimiter only.
-    :returns: The delimiter, WITHOUT a trailing newline.
+    :return: The delimiter, WITHOUT a trailing newline.
     :rtype: str
     """

-    prepend_dashes = '--' if is_prepend_dashes else ''
-    append_dashes = '--' if is_append_dashes else ''
+    prepend_dashes = "--" if is_prepend_dashes else ""
+    append_dashes = "--" if is_append_dashes else ""

     return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes


 def _make_body_from_sub_request(sub_request):
     """
-     Content-Type: application/http
-     Content-ID: <sequential int ID>
-     Content-Transfer-Encoding: <value> (if present)
+    Content-Type: application/http
+    Content-ID: <sequential int ID>
+    Content-Transfer-Encoding: <value> (if present)

-     <verb> <path><query> HTTP/<version>
-     <header key>: <header value> (repeated as necessary)
-     Content-Length: <value>
-     (newline if content length > 0)
-     <body> (if content length > 0)
+    <verb> <path><query> HTTP/<version>
+    <header key>: <header value> (repeated as necessary)
+    Content-Length: <value>
+    (newline if content length > 0)
+    <body> (if content length > 0)

-     Serializes an http request.
+    Serializes an http request.

-     :param ~azure.core.pipeline.transport.HttpRequest sub_request:
-         Request to serialize.
-     :return: The serialized sub-request in bytes
-     :rtype: bytes
-     """
+    :param ~azure.core.pipeline.transport.HttpRequest sub_request:
+        Request to serialize.
+    :return: The serialized sub-request in bytes
+    :rtype: bytes
+    """

     # put the sub-request's headers into a list for efficient str concatenation
     sub_request_body = []
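Concretely, the serialized batch is a multipart-style payload: each sub-request is framed by --<delimiter>, and the closing delimiter gets a trailing --. A rough sketch of the framing, assuming a batch_ delimiter prefix in _REQUEST_DELIMITER_PREFIX (the GUID and paths are placeholders):

    batch_id = "00000000-0000-0000-0000-000000000000"  # placeholder GUID
    delimiter = "batch_" + batch_id  # assumed prefix

    body = (
        f"--{delimiter}\r\n"
        "Content-Type: application/http\r\n"
        "Content-ID: 0\r\n"
        "Content-Length: 0\r\n"
        "\r\n"
        "DELETE /mycontainer/blob0 HTTP/1.1\r\n"
        "\r\n"
        f"--{delimiter}--\r\n"
    ).encode("utf-8")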
@@ -249,9 +254,9 @@ def _make_body_from_sub_request(sub_request):

     # append HTTP verb and path and query and HTTP version
     sub_request_body.append(sub_request.method)
-    sub_request_body.append(' ')
+    sub_request_body.append(" ")
     sub_request_body.append(sub_request.url)
-    sub_request_body.append(' ')
+    sub_request_body.append(" ")
     sub_request_body.append(_HTTP1_1_IDENTIFIER)
     sub_request_body.append(_HTTP_LINE_ENDING)

@@ -266,4 +271,4 @@ def _make_body_from_sub_request(sub_request):
     # append blank line
     sub_request_body.append(_HTTP_LINE_ENDING)

-    return ''.join(sub_request_body).encode()
+    return "".join(sub_request_body).encode()
azure/storage/blob/_shared/response_handlers.py

@@ -46,23 +46,23 @@ def parse_length_from_content_range(content_range):
     # First, split in space and take the second half: '1-3/65537'
     # Next, split on slash and take the second half: '65537'
     # Finally, convert to an int: 65537
-    return int(content_range.split(' ', 1)[1].split('/', 1)[1])
+    return int(content_range.split(" ", 1)[1].split("/", 1)[1])


 def normalize_headers(headers):
     normalized = {}
     for key, value in headers.items():
-        if key.startswith('x-ms-'):
+        if key.startswith("x-ms-"):
             key = key[5:]
-        normalized[key.lower().replace('-', '_')] = get_enum_value(value)
+        normalized[key.lower().replace("-", "_")] = get_enum_value(value)
     return normalized


 def deserialize_metadata(response, obj, headers):  # pylint: disable=unused-argument
     try:
-        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith('x-ms-meta-')}
+        raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith("x-ms-meta-")}
     except AttributeError:
-        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith('x-ms-meta-')}
+        raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith("x-ms-meta-")}
     return {k[10:]: v for k, v in raw_metadata.items()}
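The comments above spell out the Content-Range algorithm; as a concrete run:

    content_range = "bytes 1-3/65537"
    total = int(content_range.split(" ", 1)[1].split("/", 1)[1])
    print(total)  # 65537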
@@ -82,19 +82,23 @@ def return_raw_deserialized(response, *_):
     return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME]


-def process_storage_error(storage_error) -> NoReturn:
+def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
     raise_error = HttpResponseError
     serialized = False
     if isinstance(storage_error, AzureSigningError):
-        storage_error.message = storage_error.message + \
-            '. This is likely due to an invalid shared key. Please check your shared key and try again.'
+        storage_error.message = (
+            storage_error.message
+            + ". This is likely due to an invalid shared key. Please check your shared key and try again."
+        )
     if not storage_error.response or storage_error.response.status_code in [200, 204]:
         raise storage_error
     # If it is one of those three then it has been serialized prior by the generated layer.
-    if isinstance(storage_error, (PartialBatchErrorException,
-                                  ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)):
+    if isinstance(
+        storage_error,
+        (PartialBatchErrorException, ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError),
+    ):
         serialized = True
-    error_code = storage_error.response.headers.get('x-ms-error-code')
+    error_code = storage_error.response.headers.get("x-ms-error-code")
     error_message = storage_error.message
     additional_data = {}
     error_dict = {}
@@ -104,27 +108,25 @@ def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
             if error_body is None or len(error_body) == 0:
                 error_body = storage_error.response.reason
         except AttributeError:
-            error_body = ''
+            error_body = ""
         # If it is an XML response
         if isinstance(error_body, Element):
-            error_dict = {
-                child.tag.lower(): child.text
-                for child in error_body
-            }
+            error_dict = {child.tag.lower(): child.text for child in error_body}
         # If it is a JSON response
         elif isinstance(error_body, dict):
-            error_dict = error_body.get('error', {})
+            error_dict = error_body.get("error", {})
         elif not error_code:
             _LOGGER.warning(
-                'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.',
-                type(error_body))
+                "Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.", type(error_body)
+            )
+            error_dict = {"message": str(error_body)}

         # If we extracted from a Json or XML response
         # There is a chance error_dict is just a string
         if error_dict and isinstance(error_dict, dict):
-            error_code = error_dict.get('code')
-            error_message = error_dict.get('message')
-            additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}}
+            error_code = error_dict.get("code")
+            error_message = error_dict.get("message")
+            additional_data = {k: v for k, v in error_dict.items() if k not in {"code", "message"}}
     except DecodeError:
         pass

@@ -132,31 +134,33 @@ def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
         # This check would be unnecessary if we have already serialized the error
         if error_code and not serialized:
             error_code = StorageErrorCode(error_code)
-            if error_code in [StorageErrorCode.condition_not_met,
-                              StorageErrorCode.blob_overwritten]:
+            if error_code in [StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten]:
                 raise_error = ResourceModifiedError
-            if error_code in [StorageErrorCode.invalid_authentication_info,
-                              StorageErrorCode.authentication_failed]:
+            if error_code in [StorageErrorCode.invalid_authentication_info, StorageErrorCode.authentication_failed]:
                 raise_error = ClientAuthenticationError
-            if error_code in [StorageErrorCode.resource_not_found,
-                              StorageErrorCode.cannot_verify_copy_source,
-                              StorageErrorCode.blob_not_found,
-                              StorageErrorCode.queue_not_found,
-                              StorageErrorCode.container_not_found,
-                              StorageErrorCode.parent_not_found,
-                              StorageErrorCode.share_not_found]:
+            if error_code in [
+                StorageErrorCode.resource_not_found,
+                StorageErrorCode.cannot_verify_copy_source,
+                StorageErrorCode.blob_not_found,
+                StorageErrorCode.queue_not_found,
+                StorageErrorCode.container_not_found,
+                StorageErrorCode.parent_not_found,
+                StorageErrorCode.share_not_found,
+            ]:
                 raise_error = ResourceNotFoundError
-            if error_code in [StorageErrorCode.account_already_exists,
-                              StorageErrorCode.account_being_created,
-                              StorageErrorCode.resource_already_exists,
-                              StorageErrorCode.resource_type_mismatch,
-                              StorageErrorCode.blob_already_exists,
-                              StorageErrorCode.queue_already_exists,
-                              StorageErrorCode.container_already_exists,
-                              StorageErrorCode.container_being_deleted,
-                              StorageErrorCode.queue_being_deleted,
-                              StorageErrorCode.share_already_exists,
-                              StorageErrorCode.share_being_deleted]:
+            if error_code in [
+                StorageErrorCode.account_already_exists,
+                StorageErrorCode.account_being_created,
+                StorageErrorCode.resource_already_exists,
+                StorageErrorCode.resource_type_mismatch,
+                StorageErrorCode.blob_already_exists,
+                StorageErrorCode.queue_already_exists,
+                StorageErrorCode.container_already_exists,
+                StorageErrorCode.container_being_deleted,
+                StorageErrorCode.queue_being_deleted,
+                StorageErrorCode.share_already_exists,
+                StorageErrorCode.share_being_deleted,
+            ]:
                 raise_error = ResourceExistsError
     except ValueError:
         # Got an unknown error code
@@ -183,7 +187,7 @@ def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
     error.args = (error.message,)
     try:
         # `from None` prevents us from double printing the exception (suppresses generated layer error context)
-        exec("raise error from None")
+        exec("raise error from None")  # pylint: disable=exec-used # nosec
     except SyntaxError as exc:
         raise error from exc
