azure-storage-blob 12.26.0b1__py3-none-any.whl → 12.27.0__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- azure/storage/blob/__init__.py +6 -5
- azure/storage/blob/_blob_client.py +59 -38
- azure/storage/blob/_blob_client.pyi +780 -0
- azure/storage/blob/_blob_client_helpers.py +4 -3
- azure/storage/blob/_blob_service_client.py +57 -17
- azure/storage/blob/_blob_service_client.pyi +182 -0
- azure/storage/blob/_container_client.py +47 -22
- azure/storage/blob/_container_client.pyi +380 -0
- azure/storage/blob/_deserialize.py +1 -1
- azure/storage/blob/_download.py +7 -7
- azure/storage/blob/_encryption.py +177 -184
- azure/storage/blob/_generated/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/_configuration.py +2 -2
- azure/storage/blob/_generated/_utils/__init__.py +6 -0
- azure/storage/blob/_generated/{_serialization.py → _utils/serialization.py} +4 -22
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +3 -2
- azure/storage/blob/_generated/aio/_configuration.py +2 -2
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +6 -10
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +35 -39
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -13
- azure/storage/blob/_generated/aio/operations/_container_operations.py +20 -24
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +13 -17
- azure/storage/blob/_generated/aio/operations/_service_operations.py +10 -14
- azure/storage/blob/_generated/models/_models_py3.py +30 -9
- azure/storage/blob/_generated/operations/_append_blob_operations.py +11 -15
- azure/storage/blob/_generated/operations/_blob_operations.py +60 -64
- azure/storage/blob/_generated/operations/_block_blob_operations.py +16 -20
- azure/storage/blob/_generated/operations/_container_operations.py +39 -43
- azure/storage/blob/_generated/operations/_page_blob_operations.py +23 -27
- azure/storage/blob/_generated/operations/_service_operations.py +19 -23
- azure/storage/blob/_lease.py +3 -2
- azure/storage/blob/_lease.pyi +81 -0
- azure/storage/blob/_list_blobs_helper.py +1 -1
- azure/storage/blob/_quick_query_helper.py +3 -3
- azure/storage/blob/_serialize.py +1 -0
- azure/storage/blob/_shared/__init__.py +7 -7
- azure/storage/blob/_shared/authentication.py +49 -32
- azure/storage/blob/_shared/avro/avro_io.py +44 -42
- azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
- azure/storage/blob/_shared/avro/datafile.py +24 -21
- azure/storage/blob/_shared/avro/datafile_async.py +15 -15
- azure/storage/blob/_shared/avro/schema.py +196 -217
- azure/storage/blob/_shared/base_client.py +79 -70
- azure/storage/blob/_shared/base_client_async.py +53 -68
- azure/storage/blob/_shared/constants.py +1 -1
- azure/storage/blob/_shared/models.py +94 -92
- azure/storage/blob/_shared/parser.py +3 -3
- azure/storage/blob/_shared/policies.py +186 -147
- azure/storage/blob/_shared/policies_async.py +58 -69
- azure/storage/blob/_shared/request_handlers.py +50 -45
- azure/storage/blob/_shared/response_handlers.py +54 -45
- azure/storage/blob/_shared/shared_access_signature.py +65 -73
- azure/storage/blob/_shared/uploads.py +56 -49
- azure/storage/blob/_shared/uploads_async.py +70 -58
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +8 -10
- azure/storage/blob/aio/_blob_client_async.py +81 -48
- azure/storage/blob/aio/_blob_client_async.pyi +763 -0
- azure/storage/blob/aio/_blob_service_client_async.py +54 -15
- azure/storage/blob/aio/_blob_service_client_async.pyi +187 -0
- azure/storage/blob/aio/_container_client_async.py +55 -26
- azure/storage/blob/aio/_container_client_async.pyi +384 -0
- azure/storage/blob/aio/_download_async.py +15 -11
- azure/storage/blob/aio/_lease_async.py +3 -2
- azure/storage/blob/aio/_lease_async.pyi +81 -0
- azure/storage/blob/aio/_quick_query_helper_async.py +3 -3
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info}/METADATA +18 -6
- azure_storage_blob-12.27.0.dist-info/RECORD +94 -0
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info}/WHEEL +1 -1
- azure_storage_blob-12.26.0b1.dist-info/RECORD +0 -85
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info/licenses}/LICENSE +0 -0
- {azure_storage_blob-12.26.0b1.dist-info → azure_storage_blob-12.27.0.dist-info}/top_level.txt +0 -0
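The dist-info rename and the one-line `_version.py` change mean the installed distribution now reports 12.27.0. A minimal post-upgrade check, assuming only that the wheel is installed in the current environment (`importlib.metadata` reads the installed `.dist-info` METADATA, so no storage account is needed):

```python
# Confirm which side of this diff is installed; expects "12.27.0" after upgrading.
from importlib.metadata import version

print(version("azure-storage-blob"))
```

The hunks reproduced below come from azure/storage/blob/_shared/authentication.py and the two Avro decoder modules, azure/storage/blob/_shared/avro/avro_io.py and avro_io_async.py. Removed lines whose text was cut in the rendered diff are marked with `…` where they could not be recovered.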
azure/storage/blob/_shared/authentication.py

```diff
@@ -28,6 +28,7 @@ from . import sign_string
 logger = logging.getLogger(__name__)
 
 
+# fmt: off
 table_lv0 = [
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
@@ -51,6 +52,8 @@ table_lv4 = [
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
 ]
+# fmt: on
+
 
 def compare(lhs: str, rhs: str) -> int:  # pylint:disable=too-many-return-statements
     tables = [table_lv0, table_lv4]
@@ -95,6 +98,7 @@ def _wrap_exception(ex, desired_type):
     msg = ex.args[0]
     return desired_type(msg)
 
+
 # This method attempts to emulate the sorting done by the service
 def _storage_header_sort(input_headers: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
 
@@ -135,38 +139,42 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
     @staticmethod
     def _get_headers(request, headers_to_sign):
         headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value)
-        if 'content-length' in headers and headers['content-length'] == '0':
-            del headers['content-length']
-        return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n'
+        if "content-length" in headers and headers["content-length"] == "0":
+            del headers["content-length"]
+        return "\n".join(headers.get(x, "") for x in headers_to_sign) + "\n"
 
     @staticmethod
     def _get_verb(request):
-        return request.http_request.method + '\n'
+        return request.http_request.method + "\n"
 
     def _get_canonicalized_resource(self, request):
         uri_path = urlparse(request.http_request.url).path
         try:
-            if …
-            …
-            …
-            …
+            if (
+                isinstance(request.context.transport, AioHttpTransport)
+                or isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport)
+                or isinstance(
+                    getattr(getattr(request.context.transport, "_transport", None), "_transport", None),
+                    AioHttpTransport,
+                )
+            ):
                 uri_path = URL(uri_path)
-            return '/' + self.account_name + str(uri_path)
+            return "/" + self.account_name + str(uri_path)
         except TypeError:
             pass
-        return '/' + self.account_name + uri_path
+        return "/" + self.account_name + uri_path
 
     @staticmethod
     def _get_canonicalized_headers(request):
-        string_to_sign = ''
+        string_to_sign = ""
         x_ms_headers = []
         for name, value in request.http_request.headers.items():
-            if name.startswith('x-ms-'):
+            if name.startswith("x-ms-"):
                 x_ms_headers.append((name.lower(), value))
         x_ms_headers = _storage_header_sort(x_ms_headers)
         for name, value in x_ms_headers:
             if value is not None:
-                string_to_sign += ''.join([name, ':', value, '\n'])
+                string_to_sign += "".join([name, ":", value, "\n"])
         return string_to_sign
 
     @staticmethod
@@ -174,37 +182,46 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
         sorted_queries = list(request.http_request.query.items())
         sorted_queries.sort()
 
-        string_to_sign = ''
+        string_to_sign = ""
         for name, value in sorted_queries:
             if value is not None:
-                string_to_sign += '\n' + name.lower() + ':' + unquote(value)
+                string_to_sign += "\n" + name.lower() + ":" + unquote(value)
 
         return string_to_sign
 
     def _add_authorization_header(self, request, string_to_sign):
         try:
             signature = sign_string(self.account_key, string_to_sign)
-            auth_string = 'SharedKey ' + self.account_name + ':' + signature
-            request.http_request.headers['Authorization'] = auth_string
+            auth_string = "SharedKey " + self.account_name + ":" + signature
+            request.http_request.headers["Authorization"] = auth_string
         except Exception as ex:
             # Wrap any error that occurred as signing error
             # Doing so will clarify/locate the source of problem
             raise _wrap_exception(ex, AzureSigningError) from ex
 
     def on_request(self, request):
-        string_to_sign = …
-            self._get_verb(request) …
-            self._get_headers(
+        string_to_sign = (
+            self._get_verb(request)
+            + self._get_headers(
                 request,
                 [
-                    …
-                    …
-                    …
-                    …
-                    …
-                    …
-                    …
-                    …
+                    "content-encoding",
+                    "content-language",
+                    "content-length",
+                    "content-md5",
+                    "content-type",
+                    "date",
+                    "if-modified-since",
+                    "if-match",
+                    "if-none-match",
+                    "if-unmodified-since",
+                    "byte_range",
+                ],
+            )
+            + self._get_canonicalized_headers(request)
+            + self._get_canonicalized_resource(request)
+            + self._get_canonicalized_resource_query(request)
+        )
 
         self._add_authorization_header(request, string_to_sign)
         # logger.debug("String_to_sign=%s", string_to_sign)
@@ -212,7 +229,7 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
 
 class StorageHttpChallenge(object):
     def __init__(self, challenge):
-        """ …
+        """Parses an HTTP WWW-Authentication Bearer challenge from the Storage service."""
        if not challenge:
             raise ValueError("Challenge cannot be empty")
 
@@ -221,7 +238,7 @@ class StorageHttpChallenge(object):
 
         # name=value pairs either comma or space separated with values possibly being
         # enclosed in quotes
-        for item in re.split('[, ]', trimmed_challenge):
+        for item in re.split("[, ]", trimmed_challenge):
             comps = item.split("=")
             if len(comps) == 2:
                 key = comps[0].strip(' "')
@@ -230,11 +247,11 @@ class StorageHttpChallenge(object):
                 self._parameters[key] = value
 
         # Extract and verify required parameters
-        self.authorization_uri = self._parameters.get('authorization_uri')
+        self.authorization_uri = self._parameters.get("authorization_uri")
         if not self.authorization_uri:
             raise ValueError("Authorization Uri not found")
 
-        self.resource_id = self._parameters.get('resource_id')
+        self.resource_id = self._parameters.get("resource_id")
         if not self.resource_id:
             raise ValueError("Resource id not found")
 
```
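The hunks above leave the Shared Key flow unchanged: `on_request` still concatenates the verb, the standard headers, the canonicalized x-ms-* headers, the canonicalized resource, and the query string, and `_add_authorization_header` signs the result via `sign_string`. For orientation, a standalone sketch of that signing step following the documented Shared Key scheme (not the SDK's own helper), assuming `account_key` is the Base64-encoded storage account key:

```python
# Shared Key signing sketch: HMAC-SHA256 over the UTF-8 string-to-sign, keyed
# with the Base64-decoded account key, then Base64-encoded for the header value.
import base64
import hashlib
import hmac


def sign_string_sketch(account_key: str, string_to_sign: str) -> str:
    key = base64.b64decode(account_key)
    digest = hmac.new(key, string_to_sign.encode("utf-8"), hashlib.sha256).digest()
    return base64.b64encode(digest).decode("utf-8")


# The policy then sends: Authorization: SharedKey <account-name>:<signature>
```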
azure/storage/blob/_shared/avro/avro_io.py

```diff
@@ -42,8 +42,8 @@ logger = logging.getLogger(__name__)
 # ------------------------------------------------------------------------------
 # Constants
 
-STRUCT_FLOAT = struct.Struct(…
-STRUCT_DOUBLE = struct.Struct(…
+STRUCT_FLOAT = struct.Struct("<f")  # little-endian float
+STRUCT_DOUBLE = struct.Struct("<d")  # little-endian double
 
 # ------------------------------------------------------------------------------
 # Exceptions
```
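STRUCT_FLOAT and STRUCT_DOUBLE are precompiled formats for the 4-byte and 8-byte little-endian IEEE 754 values used by the Avro binary encoding. A quick illustration of what such struct objects pack and unpack (standard library only):

```python
import struct

STRUCT_FLOAT = struct.Struct("<f")   # 4-byte little-endian IEEE 754
STRUCT_DOUBLE = struct.Struct("<d")  # 8-byte little-endian IEEE 754

assert STRUCT_FLOAT.size == 4 and STRUCT_DOUBLE.size == 8
assert STRUCT_DOUBLE.unpack(STRUCT_DOUBLE.pack(1.5))[0] == 1.5
```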
```diff
@@ -56,6 +56,7 @@ class SchemaResolutionException(schema.AvroException):
         fail_msg += f"\nWriter's Schema: {pretty_writers}"
         schema.AvroException.__init__(self, fail_msg)
 
+
 # ------------------------------------------------------------------------------
 # Decoder
 
@@ -78,14 +79,14 @@ class BinaryDecoder(object):
         """Read n bytes.
 
         :param int n: Number of bytes to read.
-        :…
+        :return: The next n bytes from the input.
         :rtype: bytes
         """
-        assert …
+        assert n >= 0, n
         input_bytes = self.reader.read(n)
         if n > 0 and not input_bytes:
             raise StopIteration
-        assert …
+        assert len(input_bytes) == n, input_bytes
         return input_bytes
 
     @staticmethod
@@ -149,7 +150,7 @@ class BinaryDecoder(object):
         Bytes are encoded as a long followed by that many bytes of data.
         """
         nbytes = self.read_long()
-        assert …
+        assert nbytes >= 0, nbytes
         return self.read(nbytes)
 
     def read_utf8(self):
@@ -160,9 +161,9 @@ class BinaryDecoder(object):
         input_bytes = self.read_bytes()
         if PY3:
             try:
-                return input_bytes.decode('utf-8')
+                return input_bytes.decode("utf-8")
             except UnicodeDecodeError as exn:
-                logger.error(…
+                logger.error("Invalid UTF-8 input bytes: %r", input_bytes)  # pylint: disable=do-not-log-raised-errors
                 raise exn
         else:
             # PY2
@@ -216,41 +217,40 @@ class DatumReader(object):
     def set_writer_schema(self, writer_schema):
         self._writer_schema = writer_schema
 
-    writer_schema = property(lambda self: self._writer_schema,
-                             set_writer_schema)
+    writer_schema = property(lambda self: self._writer_schema, set_writer_schema)
 
     def read(self, decoder):
         return self.read_data(self.writer_schema, decoder)
 
     def read_data(self, writer_schema, decoder):
         # function dispatch for reading data based on type of writer's schema
-        if writer_schema.type == 'null':
+        if writer_schema.type == "null":
             result = decoder.read_null()
-        elif writer_schema.type == 'boolean':
+        elif writer_schema.type == "boolean":
             result = decoder.read_boolean()
-        elif writer_schema.type == 'string':
+        elif writer_schema.type == "string":
             result = decoder.read_utf8()
-        elif writer_schema.type == 'int':
+        elif writer_schema.type == "int":
             result = decoder.read_int()
-        elif writer_schema.type == 'long':
+        elif writer_schema.type == "long":
             result = decoder.read_long()
-        elif writer_schema.type == 'float':
+        elif writer_schema.type == "float":
             result = decoder.read_float()
-        elif writer_schema.type == 'double':
+        elif writer_schema.type == "double":
             result = decoder.read_double()
-        elif writer_schema.type == 'bytes':
+        elif writer_schema.type == "bytes":
             result = decoder.read_bytes()
-        elif writer_schema.type == 'fixed':
+        elif writer_schema.type == "fixed":
             result = self.read_fixed(writer_schema, decoder)
-        elif writer_schema.type == 'enum':
+        elif writer_schema.type == "enum":
             result = self.read_enum(writer_schema, decoder)
-        elif writer_schema.type == 'array':
+        elif writer_schema.type == "array":
             result = self.read_array(writer_schema, decoder)
-        elif writer_schema.type == 'map':
+        elif writer_schema.type == "map":
             result = self.read_map(writer_schema, decoder)
-        elif writer_schema.type in ['union', 'error_union']:
+        elif writer_schema.type in ["union", "error_union"]:
             result = self.read_union(writer_schema, decoder)
-        elif writer_schema.type in ['record', 'error', 'request']:
+        elif writer_schema.type in ["record", "error", "request"]:
             result = self.read_record(writer_schema, decoder)
         else:
             fail_msg = f"Cannot read unknown schema type: {writer_schema.type}"
@@ -258,35 +258,35 @@ class DatumReader(object):
         return result
 
     def skip_data(self, writer_schema, decoder):
-        if writer_schema.type == 'null':
+        if writer_schema.type == "null":
             result = decoder.skip_null()
-        elif writer_schema.type == 'boolean':
+        elif writer_schema.type == "boolean":
             result = decoder.skip_boolean()
-        elif writer_schema.type == 'string':
+        elif writer_schema.type == "string":
             result = decoder.skip_utf8()
-        elif writer_schema.type == 'int':
+        elif writer_schema.type == "int":
             result = decoder.skip_int()
-        elif writer_schema.type == 'long':
+        elif writer_schema.type == "long":
             result = decoder.skip_long()
-        elif writer_schema.type == 'float':
+        elif writer_schema.type == "float":
             result = decoder.skip_float()
-        elif writer_schema.type == 'double':
+        elif writer_schema.type == "double":
             result = decoder.skip_double()
-        elif writer_schema.type == 'bytes':
+        elif writer_schema.type == "bytes":
             result = decoder.skip_bytes()
-        elif writer_schema.type == 'fixed':
+        elif writer_schema.type == "fixed":
             result = self.skip_fixed(writer_schema, decoder)
-        elif writer_schema.type == 'enum':
+        elif writer_schema.type == "enum":
             result = self.skip_enum(decoder)
-        elif writer_schema.type == 'array':
+        elif writer_schema.type == "array":
             self.skip_array(writer_schema, decoder)
             result = None
-        elif writer_schema.type == 'map':
+        elif writer_schema.type == "map":
             self.skip_map(writer_schema, decoder)
             result = None
-        elif writer_schema.type in ['union', 'error_union']:
+        elif writer_schema.type in ["union", "error_union"]:
             result = self.skip_union(writer_schema, decoder)
-        elif writer_schema.type in ['record', 'error', 'request']:
+        elif writer_schema.type in ["record", "error", "request"]:
             self.skip_record(writer_schema, decoder)
             result = None
         else:
@@ -389,8 +389,9 @@ class DatumReader(object):
         # schema resolution
         index_of_schema = int(decoder.read_long())
         if index_of_schema >= len(writer_schema.schemas):
-            fail_msg = (
-                …
+            fail_msg = (
+                f"Can't access branch index {index_of_schema} " f"for union with {len(writer_schema.schemas)} branches"
+            )
             raise SchemaResolutionException(fail_msg, writer_schema)
         selected_writer_schema = writer_schema.schemas[index_of_schema]
 
@@ -400,8 +401,9 @@ class DatumReader(object):
     def skip_union(self, writer_schema, decoder):
         index_of_schema = int(decoder.read_long())
         if index_of_schema >= len(writer_schema.schemas):
-            fail_msg = (
-                …
+            fail_msg = (
+                f"Can't access branch index {index_of_schema} " f"for union with {len(writer_schema.schemas)} branches"
+            )
             raise SchemaResolutionException(fail_msg, writer_schema)
         return self.skip_data(writer_schema.schemas[index_of_schema], decoder)
 
```
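For context on the dispatch above: `read_int` and `read_long` consume Avro's variable-length zig-zag encoding, while `read_float`/`read_double` use the little-endian structs shown earlier. A small illustrative decoder for the zig-zag scheme, following the Avro specification rather than the SDK's exact code:

```python
import io


def read_zigzag_long(stream: io.BytesIO) -> int:
    # Variable-length base-128: low 7 bits per byte, high bit flags continuation.
    b = ord(stream.read(1))
    n = b & 0x7F
    shift = 7
    while b & 0x80:
        b = ord(stream.read(1))
        n |= (b & 0x7F) << shift
        shift += 7
    # Undo the zig-zag mapping: 0, 1, 2, 3, ... -> 0, -1, 1, -2, ...
    return (n >> 1) ^ -(n & 1)


assert read_zigzag_long(io.BytesIO(b"\xac\x02")) == 150
assert read_zigzag_long(io.BytesIO(b"\x01")) == -1
```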
azure/storage/blob/_shared/avro/avro_io_async.py

```diff
@@ -61,14 +61,14 @@ class AsyncBinaryDecoder(object):
         """Read n bytes.
 
         :param int n: Number of bytes to read.
-        :…
+        :return: The next n bytes from the input.
         :rtype: bytes
         """
-        assert …
+        assert n >= 0, n
         input_bytes = await self.reader.read(n)
         if n > 0 and not input_bytes:
             raise StopAsyncIteration
-        assert …
+        assert len(input_bytes) == n, input_bytes
         return input_bytes
 
     @staticmethod
@@ -132,7 +132,7 @@ class AsyncBinaryDecoder(object):
         Bytes are encoded as a long followed by that many bytes of data.
         """
         nbytes = await self.read_long()
-        assert …
+        assert nbytes >= 0, nbytes
         return await self.read(nbytes)
 
     async def read_utf8(self):
@@ -143,13 +143,13 @@ class AsyncBinaryDecoder(object):
         input_bytes = await self.read_bytes()
         if PY3:
             try:
-                return input_bytes.decode('utf-8')
+                return input_bytes.decode("utf-8")
             except UnicodeDecodeError as exn:
-                logger.error(…
+                logger.error("Invalid UTF-8 input bytes: %r", input_bytes)  # pylint: disable=do-not-log-raised-errors
                 raise exn
         else:
             # PY2
-            return unicode(input_bytes, "utf-8")
+            return unicode(input_bytes, "utf-8")  # pylint: disable=undefined-variable
 
     def skip_null(self):
         pass
@@ -200,41 +200,40 @@ class AsyncDatumReader(object):
     def set_writer_schema(self, writer_schema):
         self._writer_schema = writer_schema
 
-    writer_schema = property(lambda self: self._writer_schema,
-                             set_writer_schema)
+    writer_schema = property(lambda self: self._writer_schema, set_writer_schema)
 
     async def read(self, decoder):
         return await self.read_data(self.writer_schema, decoder)
 
     async def read_data(self, writer_schema, decoder):
         # function dispatch for reading data based on type of writer's schema
-        if writer_schema.type == 'null':
+        if writer_schema.type == "null":
             result = decoder.read_null()
-        elif writer_schema.type == 'boolean':
+        elif writer_schema.type == "boolean":
             result = await decoder.read_boolean()
-        elif writer_schema.type == 'string':
+        elif writer_schema.type == "string":
             result = await decoder.read_utf8()
-        elif writer_schema.type == 'int':
+        elif writer_schema.type == "int":
             result = await decoder.read_int()
-        elif writer_schema.type == 'long':
+        elif writer_schema.type == "long":
             result = await decoder.read_long()
-        elif writer_schema.type == 'float':
+        elif writer_schema.type == "float":
             result = await decoder.read_float()
-        elif writer_schema.type == 'double':
+        elif writer_schema.type == "double":
             result = await decoder.read_double()
-        elif writer_schema.type == 'bytes':
+        elif writer_schema.type == "bytes":
             result = await decoder.read_bytes()
-        elif writer_schema.type == 'fixed':
+        elif writer_schema.type == "fixed":
             result = await self.read_fixed(writer_schema, decoder)
-        elif writer_schema.type == 'enum':
+        elif writer_schema.type == "enum":
             result = await self.read_enum(writer_schema, decoder)
-        elif writer_schema.type == 'array':
+        elif writer_schema.type == "array":
             result = await self.read_array(writer_schema, decoder)
-        elif writer_schema.type == 'map':
+        elif writer_schema.type == "map":
             result = await self.read_map(writer_schema, decoder)
-        elif writer_schema.type in ['union', 'error_union']:
+        elif writer_schema.type in ["union", "error_union"]:
             result = await self.read_union(writer_schema, decoder)
-        elif writer_schema.type in ['record', 'error', 'request']:
+        elif writer_schema.type in ["record", "error", "request"]:
             result = await self.read_record(writer_schema, decoder)
         else:
             fail_msg = f"Cannot read unknown schema type: {writer_schema.type}"
@@ -242,35 +241,35 @@ class AsyncDatumReader(object):
         return result
 
     async def skip_data(self, writer_schema, decoder):
-        if writer_schema.type == 'null':
+        if writer_schema.type == "null":
             result = decoder.skip_null()
-        elif writer_schema.type == 'boolean':
+        elif writer_schema.type == "boolean":
             result = await decoder.skip_boolean()
-        elif writer_schema.type == 'string':
+        elif writer_schema.type == "string":
             result = await decoder.skip_utf8()
-        elif writer_schema.type == 'int':
+        elif writer_schema.type == "int":
             result = await decoder.skip_int()
-        elif writer_schema.type == 'long':
+        elif writer_schema.type == "long":
             result = await decoder.skip_long()
-        elif writer_schema.type == 'float':
+        elif writer_schema.type == "float":
             result = await decoder.skip_float()
-        elif writer_schema.type == 'double':
+        elif writer_schema.type == "double":
             result = await decoder.skip_double()
-        elif writer_schema.type == 'bytes':
+        elif writer_schema.type == "bytes":
             result = await decoder.skip_bytes()
-        elif writer_schema.type == 'fixed':
+        elif writer_schema.type == "fixed":
             result = await self.skip_fixed(writer_schema, decoder)
-        elif writer_schema.type == 'enum':
+        elif writer_schema.type == "enum":
             result = await self.skip_enum(decoder)
-        elif writer_schema.type == 'array':
+        elif writer_schema.type == "array":
             await self.skip_array(writer_schema, decoder)
             result = None
-        elif writer_schema.type == 'map':
+        elif writer_schema.type == "map":
             await self.skip_map(writer_schema, decoder)
             result = None
-        elif writer_schema.type in ['union', 'error_union']:
+        elif writer_schema.type in ["union", "error_union"]:
             result = await self.skip_union(writer_schema, decoder)
-        elif writer_schema.type in ['record', 'error', 'request']:
+        elif writer_schema.type in ["record", "error", "request"]:
             await self.skip_record(writer_schema, decoder)
             result = None
         else:
@@ -373,8 +372,9 @@ class AsyncDatumReader(object):
         # schema resolution
         index_of_schema = int(await decoder.read_long())
         if index_of_schema >= len(writer_schema.schemas):
-            fail_msg = (
-                …
+            fail_msg = (
+                f"Can't access branch index {index_of_schema} " f"for union with {len(writer_schema.schemas)} branches"
+            )
             raise SchemaResolutionException(fail_msg, writer_schema)
         selected_writer_schema = writer_schema.schemas[index_of_schema]
 
@@ -384,8 +384,9 @@ class AsyncDatumReader(object):
     async def skip_union(self, writer_schema, decoder):
         index_of_schema = int(await decoder.read_long())
         if index_of_schema >= len(writer_schema.schemas):
-            fail_msg = (
-                …
+            fail_msg = (
+                f"Can't access branch index {index_of_schema} " f"for union with {len(writer_schema.schemas)} branches"
+            )
             raise SchemaResolutionException(fail_msg, writer_schema)
         return await self.skip_data(writer_schema.schemas[index_of_schema], decoder)
 
```
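The async decoder mirrors the sync one: it awaits an object exposing `read(n)` and raises StopAsyncIteration rather than StopIteration when the stream is exhausted. A hypothetical in-memory reader of that shape, just to show the contract the decoder relies on:

```python
import asyncio


class InMemoryAsyncReader:
    """Hypothetical stand-in for the awaitable reader AsyncBinaryDecoder wraps."""

    def __init__(self, data: bytes) -> None:
        self._data = data
        self._pos = 0

    async def read(self, n: int) -> bytes:
        chunk = self._data[self._pos:self._pos + n]
        self._pos += len(chunk)
        return chunk


async def demo() -> None:
    reader = InMemoryAsyncReader(b"\xac\x02")
    assert await reader.read(1) == b"\xac"
    assert await reader.read(1) == b"\x02"
    assert await reader.read(1) == b""  # exhausted; the decoder stops iterating here


asyncio.run(demo())
```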