azure-storage-blob 12.25.0b1__py3-none-any.whl → 12.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azure/storage/blob/__init__.py +3 -2
- azure/storage/blob/_blob_client.py +94 -41
- azure/storage/blob/_blob_client_helpers.py +19 -4
- azure/storage/blob/_blob_service_client.py +16 -13
- azure/storage/blob/_container_client.py +25 -22
- azure/storage/blob/_deserialize.py +1 -1
- azure/storage/blob/_download.py +7 -7
- azure/storage/blob/_encryption.py +177 -184
- azure/storage/blob/_generated/_azure_blob_storage.py +1 -1
- azure/storage/blob/_generated/_configuration.py +2 -2
- azure/storage/blob/_generated/_serialization.py +3 -3
- azure/storage/blob/_generated/aio/_azure_blob_storage.py +1 -1
- azure/storage/blob/_generated/aio/_configuration.py +2 -2
- azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +5 -4
- azure/storage/blob/_generated/aio/operations/_blob_operations.py +5 -25
- azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +9 -7
- azure/storage/blob/_generated/aio/operations/_container_operations.py +1 -19
- azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +5 -10
- azure/storage/blob/_generated/aio/operations/_service_operations.py +1 -8
- azure/storage/blob/_generated/models/__init__.py +2 -0
- azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +6 -0
- azure/storage/blob/_generated/operations/_append_blob_operations.py +12 -9
- azure/storage/blob/_generated/operations/_blob_operations.py +32 -49
- azure/storage/blob/_generated/operations/_block_blob_operations.py +21 -13
- azure/storage/blob/_generated/operations/_container_operations.py +19 -37
- azure/storage/blob/_generated/operations/_page_blob_operations.py +17 -19
- azure/storage/blob/_generated/operations/_service_operations.py +9 -17
- azure/storage/blob/_lease.py +1 -0
- azure/storage/blob/_quick_query_helper.py +20 -24
- azure/storage/blob/_serialize.py +1 -0
- azure/storage/blob/_shared/__init__.py +7 -7
- azure/storage/blob/_shared/authentication.py +49 -32
- azure/storage/blob/_shared/avro/avro_io.py +45 -43
- azure/storage/blob/_shared/avro/avro_io_async.py +42 -41
- azure/storage/blob/_shared/avro/datafile.py +24 -21
- azure/storage/blob/_shared/avro/datafile_async.py +15 -15
- azure/storage/blob/_shared/avro/schema.py +196 -217
- azure/storage/blob/_shared/base_client.py +87 -61
- azure/storage/blob/_shared/base_client_async.py +58 -51
- azure/storage/blob/_shared/constants.py +1 -1
- azure/storage/blob/_shared/models.py +93 -92
- azure/storage/blob/_shared/parser.py +3 -3
- azure/storage/blob/_shared/policies.py +176 -145
- azure/storage/blob/_shared/policies_async.py +59 -70
- azure/storage/blob/_shared/request_handlers.py +51 -47
- azure/storage/blob/_shared/response_handlers.py +49 -45
- azure/storage/blob/_shared/shared_access_signature.py +67 -71
- azure/storage/blob/_shared/uploads.py +56 -49
- azure/storage/blob/_shared/uploads_async.py +72 -61
- azure/storage/blob/_shared_access_signature.py +3 -1
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +3 -2
- azure/storage/blob/aio/_blob_client_async.py +241 -44
- azure/storage/blob/aio/_blob_service_client_async.py +13 -11
- azure/storage/blob/aio/_container_client_async.py +28 -25
- azure/storage/blob/aio/_download_async.py +16 -12
- azure/storage/blob/aio/_lease_async.py +1 -0
- azure/storage/blob/aio/_quick_query_helper_async.py +194 -0
- {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/METADATA +7 -7
- azure_storage_blob-12.26.0.dist-info/RECORD +85 -0
- {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/WHEEL +1 -1
- azure_storage_blob-12.25.0b1.dist-info/RECORD +0 -84
- {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/LICENSE +0 -0
- {azure_storage_blob-12.25.0b1.dist-info → azure_storage_blob-12.26.0.dist-info}/top_level.txt +0 -0
azure/storage/blob/_quick_query_helper.py
CHANGED
@@ -5,7 +5,10 @@
 # --------------------------------------------------------------------------
 
 from io import BytesIO
-from typing import
+from typing import (
+    Any, Dict, Generator, IO, Iterable, Optional, Type,
+    TYPE_CHECKING
+)
 
 from ._shared.avro.avro_io import DatumReader
 from ._shared.avro.datafile import DataFileReader
@@ -14,11 +17,11 @@ if TYPE_CHECKING:
     from ._models import BlobQueryError
 
 
-class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes
+class BlobQueryReader: # pylint: disable=too-many-instance-attributes
     """A streaming object to read query results."""
 
     name: str
-    """The name of the blob being
+    """The name of the blob being queried."""
     container: str
     """The name of the container where the blob is."""
     response_headers: Dict[str, Any]
@@ -28,8 +31,7 @@ class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes
     method will return these lines via a generator."""
 
     def __init__(
-        self,
-        name: str = None,  # type: ignore [assignment]
+        self, name: str = None,  # type: ignore [assignment]
         container: str = None,  # type: ignore [assignment]
         errors: Any = None,
         record_delimiter: str = '\n',
@@ -50,7 +52,7 @@ class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes
         self._first_result = self._process_record(next(self._parsed_results))
         self._error_cls = error_cls
 
-    def __len__(self):
+    def __len__(self) -> int:
         return self._size
 
     def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]:
@@ -77,21 +79,19 @@ class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes
             if processed_result is not None:
                 yield processed_result
 
-    def readall(self) ->
+    def readall(self) -> bytes:
         """Return all query results.
 
         This operation is blocking until all data is downloaded.
-        If encoding has been configured - this will be used to decode individual
-        records are they are received.
 
-        :
-        :rtype:
+        :return: The query results.
+        :rtype: bytes
         """
         stream = BytesIO()
         self.readinto(stream)
         data = stream.getvalue()
         if self._encoding:
-            return data.decode(self._encoding)
+            return data.decode(self._encoding)  # type: ignore [return-value]
         return data
 
     def readinto(self, stream: IO) -> None:
@@ -100,34 +100,30 @@ class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes
         :param IO stream:
             The stream to download to. This can be an open file-handle,
             or any writable stream.
-        :
+        :return: None
        """
         for record in self._iter_stream():
             stream.write(record)
 
-    def records(self) -> Iterable[
+    def records(self) -> Iterable[bytes]:
         """Returns a record generator for the query result.
 
         Records will be returned line by line.
-        If encoding has been configured - this will be used to decode individual
-        records are they are received.
 
-        :
-        :rtype: Iterable[
+        :return: A record generator for the query result.
+        :rtype: Iterable[bytes]
         """
         delimiter = self.record_delimiter.encode('utf-8')
         for record_chunk in self._iter_stream():
             for record in record_chunk.split(delimiter):
                 if self._encoding:
-                    yield record.decode(self._encoding)
+                    yield record.decode(self._encoding)  # type: ignore [misc]
                 else:
                     yield record
 
 
-class QuickQueryStreamer(object):
-    """
-    File-like streaming iterator.
-    """
+class QuickQueryStreamer:
+    """File-like streaming iterator."""
 
     def __init__(self, generator):
         self.generator = generator
@@ -183,7 +179,7 @@ class QuickQueryStreamer(object):
         if relative_start < 0:
             raise ValueError("Buffer has dumped too much data")
         relative_end = relative_start + size
-        data = self._buf[relative_start:
+        data = self._buf[relative_start:relative_end]
 
         # dump the extra data in buffer
         # buffer start--------------------16bytes----current read position
azure/storage/blob/_shared/__init__.py
CHANGED
@@ -11,7 +11,7 @@ import hmac
 try:
     from urllib.parse import quote, unquote
 except ImportError:
-    from urllib2 import quote, unquote
+    from urllib2 import quote, unquote  # type: ignore
 
 
 def url_quote(url):
@@ -24,20 +24,20 @@ def url_unquote(url):
 
 def encode_base64(data):
     if isinstance(data, str):
-        data = data.encode('utf-8')
+        data = data.encode("utf-8")
     encoded = base64.b64encode(data)
-    return encoded.decode('utf-8')
+    return encoded.decode("utf-8")
 
 
 def decode_base64_to_bytes(data):
     if isinstance(data, str):
-        data = data.encode('utf-8')
+        data = data.encode("utf-8")
     return base64.b64decode(data)
 
 
 def decode_base64_to_text(data):
     decoded_bytes = decode_base64_to_bytes(data)
-    return decoded_bytes.decode('utf-8')
+    return decoded_bytes.decode("utf-8")
 
 
 def sign_string(key, string_to_sign, key_is_base64=True):
@@ -45,9 +45,9 @@ def sign_string(key, string_to_sign, key_is_base64=True):
         key = decode_base64_to_bytes(key)
     else:
         if isinstance(key, str):
-            key = key.encode('utf-8')
+            key = key.encode("utf-8")
     if isinstance(string_to_sign, str):
-        string_to_sign = string_to_sign.encode('utf-8')
+        string_to_sign = string_to_sign.encode("utf-8")
     signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
     digest = signed_hmac_sha256.digest()
     encoded_digest = encode_base64(digest)
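In plain terms, `sign_string` base64-decodes the account key, HMAC-SHA256-signs the UTF-8 string-to-sign, and base64-encodes the digest. A minimal standalone sketch of the same computation (the key and message below are invented):

    import base64
    import hashlib
    import hmac

    def sign_string(key: str, string_to_sign: str) -> str:
        # Storage account keys are distributed as base64; decode to raw bytes first.
        key_bytes = base64.b64decode(key)
        digest = hmac.HMAC(key_bytes, string_to_sign.encode("utf-8"), hashlib.sha256).digest()
        # The Authorization header carries the base64-encoded digest.
        return base64.b64encode(digest).decode("utf-8")

    # Invented key and message, for illustration only.
    fake_key = base64.b64encode(b"not-a-real-key").decode("utf-8")
    print(sign_string(fake_key, "GET\n\n..."))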
azure/storage/blob/_shared/authentication.py
CHANGED
@@ -28,6 +28,7 @@ from . import sign_string
 logger = logging.getLogger(__name__)
 
 
+# fmt: off
 table_lv0 = [
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
@@ -51,6 +52,8 @@ table_lv4 = [
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
     0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
 ]
+# fmt: on
+
 
 def compare(lhs: str, rhs: str) -> int:  # pylint:disable=too-many-return-statements
     tables = [table_lv0, table_lv4]
@@ -95,6 +98,7 @@ def _wrap_exception(ex, desired_type):
     msg = ex.args[0]
     return desired_type(msg)
 
+
 # This method attempts to emulate the sorting done by the service
 def _storage_header_sort(input_headers: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
 
@@ -135,38 +139,42 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
     @staticmethod
     def _get_headers(request, headers_to_sign):
         headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value)
-        if 'content-length' in headers and headers['content-length'] == '0':
-            del headers['content-length']
-        return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n'
+        if "content-length" in headers and headers["content-length"] == "0":
+            del headers["content-length"]
+        return "\n".join(headers.get(x, "") for x in headers_to_sign) + "\n"
 
     @staticmethod
     def _get_verb(request):
-        return request.http_request.method + '\n'
+        return request.http_request.method + "\n"
 
     def _get_canonicalized_resource(self, request):
         uri_path = urlparse(request.http_request.url).path
         try:
-            if isinstance(request.context.transport, AioHttpTransport) or \
-                    isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport) or \
-                    isinstance(getattr(getattr(request.context.transport, "_transport", None), "_transport", None),
-                               AioHttpTransport):
+            if (
+                isinstance(request.context.transport, AioHttpTransport)
+                or isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport)
+                or isinstance(
+                    getattr(getattr(request.context.transport, "_transport", None), "_transport", None),
+                    AioHttpTransport,
+                )
+            ):
                 uri_path = URL(uri_path)
-            return '/' + self.account_name + str(uri_path)
+            return "/" + self.account_name + str(uri_path)
         except TypeError:
             pass
-        return '/' + self.account_name + uri_path
+        return "/" + self.account_name + uri_path
 
     @staticmethod
     def _get_canonicalized_headers(request):
-        string_to_sign = ''
+        string_to_sign = ""
         x_ms_headers = []
         for name, value in request.http_request.headers.items():
-            if name.startswith('x-ms-'):
+            if name.startswith("x-ms-"):
                 x_ms_headers.append((name.lower(), value))
         x_ms_headers = _storage_header_sort(x_ms_headers)
         for name, value in x_ms_headers:
             if value is not None:
-                string_to_sign += ''.join([name, ':', value, '\n'])
+                string_to_sign += "".join([name, ":", value, "\n"])
         return string_to_sign
 
     @staticmethod
@@ -174,37 +182,46 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
         sorted_queries = list(request.http_request.query.items())
         sorted_queries.sort()
 
-        string_to_sign = ''
+        string_to_sign = ""
         for name, value in sorted_queries:
             if value is not None:
-                string_to_sign += '\n' + name.lower() + ':' + unquote(value)
+                string_to_sign += "\n" + name.lower() + ":" + unquote(value)
 
         return string_to_sign
 
     def _add_authorization_header(self, request, string_to_sign):
         try:
             signature = sign_string(self.account_key, string_to_sign)
-            auth_string = 'SharedKey ' + self.account_name + ':' + signature
-            request.http_request.headers['Authorization'] = auth_string
+            auth_string = "SharedKey " + self.account_name + ":" + signature
+            request.http_request.headers["Authorization"] = auth_string
         except Exception as ex:
             # Wrap any error that occurred as signing error
             # Doing so will clarify/locate the source of problem
             raise _wrap_exception(ex, AzureSigningError) from ex
 
     def on_request(self, request):
-        string_to_sign = \
-            self._get_verb(request) + \
-            self._get_headers(
+        string_to_sign = (
+            self._get_verb(request)
+            + self._get_headers(
                 request,
                 [
-
-
-
-
-
-
-
-
+                    "content-encoding",
+                    "content-language",
+                    "content-length",
+                    "content-md5",
+                    "content-type",
+                    "date",
+                    "if-modified-since",
+                    "if-match",
+                    "if-none-match",
+                    "if-unmodified-since",
+                    "byte_range",
+                ],
+            )
+            + self._get_canonicalized_headers(request)
+            + self._get_canonicalized_resource(request)
+            + self._get_canonicalized_resource_query(request)
+        )
 
         self._add_authorization_header(request, string_to_sign)
         # logger.debug("String_to_sign=%s", string_to_sign)
@@ -212,7 +229,7 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
 
 class StorageHttpChallenge(object):
     def __init__(self, challenge):
-        """
+        """Parses an HTTP WWW-Authentication Bearer challenge from the Storage service."""
         if not challenge:
             raise ValueError("Challenge cannot be empty")
 
@@ -221,7 +238,7 @@ class StorageHttpChallenge(object):
 
         # name=value pairs either comma or space separated with values possibly being
         # enclosed in quotes
-        for item in re.split('[, ]', trimmed_challenge):
+        for item in re.split("[, ]", trimmed_challenge):
             comps = item.split("=")
             if len(comps) == 2:
                 key = comps[0].strip(' "')
@@ -230,11 +247,11 @@ class StorageHttpChallenge(object):
                 self._parameters[key] = value
 
         # Extract and verify required parameters
-        self.authorization_uri = self._parameters.get('authorization_uri')
+        self.authorization_uri = self._parameters.get("authorization_uri")
         if not self.authorization_uri:
             raise ValueError("Authorization Uri not found")
 
-        self.resource_id = self._parameters.get('resource_id')
+        self.resource_id = self._parameters.get("resource_id")
         if not self.resource_id:
             raise ValueError("Resource id not found")
 
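As context for the challenge parser, a minimal sketch of splitting a Bearer challenge into its name=value parameters (the challenge string below is invented, and stripping the "Bearer " prefix with `removeprefix` is a simplification of the SDK's trimming):

    import re

    def parse_challenge(challenge: str) -> dict:
        # Strip the leading "Bearer " scheme, then split comma/space separated pairs.
        trimmed = challenge.strip().removeprefix("Bearer ")
        params = {}
        for item in re.split("[, ]", trimmed):
            comps = item.split("=")
            if len(comps) == 2:
                params[comps[0].strip(' "')] = comps[1].strip(' "')
        return params

    # Invented example challenge:
    print(parse_challenge(
        'Bearer authorization_uri="https://login.microsoftonline.com/tenant/oauth2/authorize", '
        'resource_id="https://storage.azure.com"'
    ))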
azure/storage/blob/_shared/avro/avro_io.py
CHANGED
@@ -42,8 +42,8 @@ logger = logging.getLogger(__name__)
 # ------------------------------------------------------------------------------
 # Constants
 
-STRUCT_FLOAT = struct.Struct('<f')  # little-endian float
-STRUCT_DOUBLE = struct.Struct('<d')  # little-endian double
+STRUCT_FLOAT = struct.Struct("<f")  # little-endian float
+STRUCT_DOUBLE = struct.Struct("<d")  # little-endian double
 
 # ------------------------------------------------------------------------------
 # Exceptions
@@ -56,6 +56,7 @@ class SchemaResolutionException(schema.AvroException):
         fail_msg += f"\nWriter's Schema: {pretty_writers}"
         schema.AvroException.__init__(self, fail_msg)
 
+
 # ------------------------------------------------------------------------------
 # Decoder
 
@@ -78,14 +79,14 @@ class BinaryDecoder(object):
         """Read n bytes.
 
         :param int n: Number of bytes to read.
-        :
+        :return: The next n bytes from the input.
         :rtype: bytes
         """
-        assert (n >= 0), n
+        assert n >= 0, n
         input_bytes = self.reader.read(n)
         if n > 0 and not input_bytes:
             raise StopIteration
-        assert (len(input_bytes) == n), input_bytes
+        assert len(input_bytes) == n, input_bytes
         return input_bytes
 
     @staticmethod
@@ -149,7 +150,7 @@ class BinaryDecoder(object):
         Bytes are encoded as a long followed by that many bytes of data.
         """
         nbytes = self.read_long()
-        assert (nbytes >= 0), nbytes
+        assert nbytes >= 0, nbytes
         return self.read(nbytes)
 
     def read_utf8(self):
@@ -160,13 +161,13 @@
         input_bytes = self.read_bytes()
         if PY3:
             try:
-                return input_bytes.decode('utf-8')
+                return input_bytes.decode("utf-8")
             except UnicodeDecodeError as exn:
-                logger.error('Invalid UTF-8 input bytes: %r', input_bytes)
+                logger.error("Invalid UTF-8 input bytes: %r", input_bytes)  # pylint: disable=do-not-log-raised-errors
                 raise exn
         else:
             # PY2
-            return unicode(input_bytes, "utf-8")
+            return unicode(input_bytes, "utf-8")  # pylint: disable=undefined-variable
 
     def skip_null(self):
         pass
@@ -216,41 +217,40 @@ class DatumReader(object):
     def set_writer_schema(self, writer_schema):
         self._writer_schema = writer_schema
 
-    writer_schema = property(lambda self: self._writer_schema,
-                             set_writer_schema)
+    writer_schema = property(lambda self: self._writer_schema, set_writer_schema)
 
     def read(self, decoder):
         return self.read_data(self.writer_schema, decoder)
 
     def read_data(self, writer_schema, decoder):
         # function dispatch for reading data based on type of writer's schema
-        if writer_schema.type == 'null':
+        if writer_schema.type == "null":
             result = decoder.read_null()
-        elif writer_schema.type == 'boolean':
+        elif writer_schema.type == "boolean":
             result = decoder.read_boolean()
-        elif writer_schema.type == 'string':
+        elif writer_schema.type == "string":
             result = decoder.read_utf8()
-        elif writer_schema.type == 'int':
+        elif writer_schema.type == "int":
             result = decoder.read_int()
-        elif writer_schema.type == 'long':
+        elif writer_schema.type == "long":
             result = decoder.read_long()
-        elif writer_schema.type == 'float':
+        elif writer_schema.type == "float":
             result = decoder.read_float()
-        elif writer_schema.type == 'double':
+        elif writer_schema.type == "double":
             result = decoder.read_double()
-        elif writer_schema.type == 'bytes':
+        elif writer_schema.type == "bytes":
             result = decoder.read_bytes()
-        elif writer_schema.type == 'fixed':
+        elif writer_schema.type == "fixed":
             result = self.read_fixed(writer_schema, decoder)
-        elif writer_schema.type == 'enum':
+        elif writer_schema.type == "enum":
             result = self.read_enum(writer_schema, decoder)
-        elif writer_schema.type == 'array':
+        elif writer_schema.type == "array":
            result = self.read_array(writer_schema, decoder)
-        elif writer_schema.type == 'map':
+        elif writer_schema.type == "map":
            result = self.read_map(writer_schema, decoder)
-        elif writer_schema.type in ['union', 'error_union']:
+        elif writer_schema.type in ["union", "error_union"]:
            result = self.read_union(writer_schema, decoder)
-        elif writer_schema.type in ['record', 'error', 'request']:
+        elif writer_schema.type in ["record", "error", "request"]:
            result = self.read_record(writer_schema, decoder)
         else:
             fail_msg = f"Cannot read unknown schema type: {writer_schema.type}"
@@ -258,35 +258,35 @@ class DatumReader(object):
         return result
 
     def skip_data(self, writer_schema, decoder):
-        if writer_schema.type == 'null':
+        if writer_schema.type == "null":
             result = decoder.skip_null()
-        elif writer_schema.type == 'boolean':
+        elif writer_schema.type == "boolean":
             result = decoder.skip_boolean()
-        elif writer_schema.type == 'string':
+        elif writer_schema.type == "string":
             result = decoder.skip_utf8()
-        elif writer_schema.type == 'int':
+        elif writer_schema.type == "int":
             result = decoder.skip_int()
-        elif writer_schema.type == 'long':
+        elif writer_schema.type == "long":
            result = decoder.skip_long()
-        elif writer_schema.type == 'float':
+        elif writer_schema.type == "float":
            result = decoder.skip_float()
-        elif writer_schema.type == 'double':
+        elif writer_schema.type == "double":
            result = decoder.skip_double()
-        elif writer_schema.type == 'bytes':
+        elif writer_schema.type == "bytes":
            result = decoder.skip_bytes()
-        elif writer_schema.type == 'fixed':
+        elif writer_schema.type == "fixed":
            result = self.skip_fixed(writer_schema, decoder)
-        elif writer_schema.type == 'enum':
+        elif writer_schema.type == "enum":
            result = self.skip_enum(decoder)
-        elif writer_schema.type == 'array':
+        elif writer_schema.type == "array":
            self.skip_array(writer_schema, decoder)
            result = None
-        elif writer_schema.type == 'map':
+        elif writer_schema.type == "map":
            self.skip_map(writer_schema, decoder)
            result = None
-        elif writer_schema.type in ['union', 'error_union']:
+        elif writer_schema.type in ["union", "error_union"]:
            result = self.skip_union(writer_schema, decoder)
-        elif writer_schema.type in ['record', 'error', 'request']:
+        elif writer_schema.type in ["record", "error", "request"]:
            self.skip_record(writer_schema, decoder)
            result = None
         else:
@@ -389,8 +389,9 @@ class DatumReader(object):
         # schema resolution
         index_of_schema = int(decoder.read_long())
         if index_of_schema >= len(writer_schema.schemas):
-            fail_msg = (f"Can't access branch index {index_of_schema} "
-                        f"for union with {len(writer_schema.schemas)} branches")
+            fail_msg = (
+                f"Can't access branch index {index_of_schema} " f"for union with {len(writer_schema.schemas)} branches"
+            )
             raise SchemaResolutionException(fail_msg, writer_schema)
         selected_writer_schema = writer_schema.schemas[index_of_schema]
 
@@ -400,8 +401,9 @@ class DatumReader(object):
     def skip_union(self, writer_schema, decoder):
         index_of_schema = int(decoder.read_long())
         if index_of_schema >= len(writer_schema.schemas):
-            fail_msg = (f"Can't access branch index {index_of_schema} "
-                        f"for union with {len(writer_schema.schemas)} branches")
+            fail_msg = (
+                f"Can't access branch index {index_of_schema} " f"for union with {len(writer_schema.schemas)} branches"
+            )
             raise SchemaResolutionException(fail_msg, writer_schema)
         return self.skip_data(writer_schema.schemas[index_of_schema], decoder)
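For background on what `read_union` and `skip_union` are doing: Avro encodes a union as a zigzag-varint long selecting the branch, followed by the value encoded with that branch's schema. A self-contained sketch of the varint/zigzag decoding that `read_long` performs (the sample byte string is made up):

    import io

    def read_long(reader: io.BytesIO) -> int:
        # Avro longs are variable-length zigzag-encoded integers:
        # 7 data bits per byte, high bit set on every byte but the last.
        b = reader.read(1)[0]
        n = b & 0x7F
        shift = 7
        while b & 0x80:
            b = reader.read(1)[0]
            n |= (b & 0x7F) << shift
            shift += 7
        # Undo zigzag: maps 0, 1, 2, 3, ... back to 0, -1, 1, -2, ...
        return (n >> 1) ^ -(n & 1)

    # Made-up encoding of union branch index 1 (zigzag-encoded as 0x02):
    print(read_long(io.BytesIO(b"\x02")))  # -> 1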