azure-ai-textanalytics 5.3.0b2__py3-none-any.whl → 6.0.0b1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of azure-ai-textanalytics might be problematic.

Files changed (128)
  1. azure/ai/textanalytics/__init__.py +26 -193
  2. azure/ai/textanalytics/_client.py +111 -0
  3. azure/ai/textanalytics/_configuration.py +73 -0
  4. azure/ai/textanalytics/{_generated/v2022_05_01/operations → _operations}/__init__.py +13 -8
  5. azure/ai/textanalytics/_operations/_operations.py +716 -0
  6. azure/ai/textanalytics/{_generated/v2022_05_01/models → _operations}/_patch.py +8 -6
  7. azure/ai/textanalytics/_patch.py +350 -0
  8. azure/ai/textanalytics/{_generated/aio → _utils}/__init__.py +1 -5
  9. azure/ai/textanalytics/_utils/model_base.py +1237 -0
  10. azure/ai/textanalytics/{_generated/_serialization.py → _utils/serialization.py} +640 -616
  11. azure/ai/textanalytics/{_generated/v2022_05_01/aio/_vendor.py → _utils/utils.py} +10 -12
  12. azure/ai/textanalytics/_version.py +8 -7
  13. azure/ai/textanalytics/aio/__init__.py +25 -14
  14. azure/ai/textanalytics/aio/_client.py +115 -0
  15. azure/ai/textanalytics/aio/_configuration.py +75 -0
  16. azure/ai/textanalytics/{_generated/v2022_10_01_preview/aio/operations → aio/_operations}/__init__.py +13 -8
  17. azure/ai/textanalytics/aio/_operations/_operations.py +623 -0
  18. azure/ai/textanalytics/{_generated/v2022_05_01 → aio/_operations}/_patch.py +8 -6
  19. azure/ai/textanalytics/aio/_patch.py +344 -0
  20. azure/ai/textanalytics/models/__init__.py +402 -0
  21. azure/ai/textanalytics/models/_enums.py +1979 -0
  22. azure/ai/textanalytics/models/_models.py +6641 -0
  23. azure/ai/textanalytics/{_generated/v2022_05_01/aio → models}/_patch.py +8 -6
  24. azure/ai/textanalytics/py.typed +1 -0
  25. {azure_ai_textanalytics-5.3.0b2.dist-info → azure_ai_textanalytics-6.0.0b1.dist-info}/METADATA +668 -403
  26. azure_ai_textanalytics-6.0.0b1.dist-info/RECORD +29 -0
  27. {azure_ai_textanalytics-5.3.0b2.dist-info → azure_ai_textanalytics-6.0.0b1.dist-info}/WHEEL +1 -1
  28. azure/ai/textanalytics/_base_client.py +0 -113
  29. azure/ai/textanalytics/_check.py +0 -22
  30. azure/ai/textanalytics/_dict_mixin.py +0 -57
  31. azure/ai/textanalytics/_generated/__init__.py +0 -16
  32. azure/ai/textanalytics/_generated/_configuration.py +0 -70
  33. azure/ai/textanalytics/_generated/_operations_mixin.py +0 -795
  34. azure/ai/textanalytics/_generated/_text_analytics_client.py +0 -126
  35. azure/ai/textanalytics/_generated/_version.py +0 -8
  36. azure/ai/textanalytics/_generated/aio/_configuration.py +0 -66
  37. azure/ai/textanalytics/_generated/aio/_operations_mixin.py +0 -776
  38. azure/ai/textanalytics/_generated/aio/_text_analytics_client.py +0 -124
  39. azure/ai/textanalytics/_generated/models.py +0 -8
  40. azure/ai/textanalytics/_generated/v2022_05_01/__init__.py +0 -20
  41. azure/ai/textanalytics/_generated/v2022_05_01/_configuration.py +0 -72
  42. azure/ai/textanalytics/_generated/v2022_05_01/_text_analytics_client.py +0 -100
  43. azure/ai/textanalytics/_generated/v2022_05_01/_vendor.py +0 -45
  44. azure/ai/textanalytics/_generated/v2022_05_01/aio/__init__.py +0 -20
  45. azure/ai/textanalytics/_generated/v2022_05_01/aio/_configuration.py +0 -71
  46. azure/ai/textanalytics/_generated/v2022_05_01/aio/_text_analytics_client.py +0 -97
  47. azure/ai/textanalytics/_generated/v2022_05_01/aio/operations/__init__.py +0 -18
  48. azure/ai/textanalytics/_generated/v2022_05_01/aio/operations/_patch.py +0 -121
  49. azure/ai/textanalytics/_generated/v2022_05_01/aio/operations/_text_analytics_client_operations.py +0 -603
  50. azure/ai/textanalytics/_generated/v2022_05_01/models/__init__.py +0 -281
  51. azure/ai/textanalytics/_generated/v2022_05_01/models/_models_py3.py +0 -5722
  52. azure/ai/textanalytics/_generated/v2022_05_01/models/_text_analytics_client_enums.py +0 -439
  53. azure/ai/textanalytics/_generated/v2022_05_01/operations/_patch.py +0 -120
  54. azure/ai/textanalytics/_generated/v2022_05_01/operations/_text_analytics_client_operations.py +0 -744
  55. azure/ai/textanalytics/_generated/v2022_10_01_preview/__init__.py +0 -20
  56. azure/ai/textanalytics/_generated/v2022_10_01_preview/_configuration.py +0 -72
  57. azure/ai/textanalytics/_generated/v2022_10_01_preview/_patch.py +0 -19
  58. azure/ai/textanalytics/_generated/v2022_10_01_preview/_text_analytics_client.py +0 -100
  59. azure/ai/textanalytics/_generated/v2022_10_01_preview/_vendor.py +0 -45
  60. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/__init__.py +0 -20
  61. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/_configuration.py +0 -71
  62. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/_patch.py +0 -19
  63. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/_text_analytics_client.py +0 -97
  64. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/_vendor.py +0 -27
  65. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/operations/_patch.py +0 -121
  66. azure/ai/textanalytics/_generated/v2022_10_01_preview/aio/operations/_text_analytics_client_operations.py +0 -603
  67. azure/ai/textanalytics/_generated/v2022_10_01_preview/models/__init__.py +0 -405
  68. azure/ai/textanalytics/_generated/v2022_10_01_preview/models/_models_py3.py +0 -8420
  69. azure/ai/textanalytics/_generated/v2022_10_01_preview/models/_patch.py +0 -486
  70. azure/ai/textanalytics/_generated/v2022_10_01_preview/models/_text_analytics_client_enums.py +0 -729
  71. azure/ai/textanalytics/_generated/v2022_10_01_preview/operations/__init__.py +0 -18
  72. azure/ai/textanalytics/_generated/v2022_10_01_preview/operations/_patch.py +0 -120
  73. azure/ai/textanalytics/_generated/v2022_10_01_preview/operations/_text_analytics_client_operations.py +0 -744
  74. azure/ai/textanalytics/_generated/v3_0/__init__.py +0 -20
  75. azure/ai/textanalytics/_generated/v3_0/_configuration.py +0 -66
  76. azure/ai/textanalytics/_generated/v3_0/_patch.py +0 -31
  77. azure/ai/textanalytics/_generated/v3_0/_text_analytics_client.py +0 -96
  78. azure/ai/textanalytics/_generated/v3_0/_vendor.py +0 -33
  79. azure/ai/textanalytics/_generated/v3_0/aio/__init__.py +0 -20
  80. azure/ai/textanalytics/_generated/v3_0/aio/_configuration.py +0 -65
  81. azure/ai/textanalytics/_generated/v3_0/aio/_patch.py +0 -31
  82. azure/ai/textanalytics/_generated/v3_0/aio/_text_analytics_client.py +0 -93
  83. azure/ai/textanalytics/_generated/v3_0/aio/_vendor.py +0 -27
  84. azure/ai/textanalytics/_generated/v3_0/aio/operations/__init__.py +0 -18
  85. azure/ai/textanalytics/_generated/v3_0/aio/operations/_patch.py +0 -19
  86. azure/ai/textanalytics/_generated/v3_0/aio/operations/_text_analytics_client_operations.py +0 -428
  87. azure/ai/textanalytics/_generated/v3_0/models/__init__.py +0 -81
  88. azure/ai/textanalytics/_generated/v3_0/models/_models_py3.py +0 -1467
  89. azure/ai/textanalytics/_generated/v3_0/models/_patch.py +0 -19
  90. azure/ai/textanalytics/_generated/v3_0/models/_text_analytics_client_enums.py +0 -58
  91. azure/ai/textanalytics/_generated/v3_0/operations/__init__.py +0 -18
  92. azure/ai/textanalytics/_generated/v3_0/operations/_patch.py +0 -19
  93. azure/ai/textanalytics/_generated/v3_0/operations/_text_analytics_client_operations.py +0 -604
  94. azure/ai/textanalytics/_generated/v3_1/__init__.py +0 -20
  95. azure/ai/textanalytics/_generated/v3_1/_configuration.py +0 -66
  96. azure/ai/textanalytics/_generated/v3_1/_patch.py +0 -31
  97. azure/ai/textanalytics/_generated/v3_1/_text_analytics_client.py +0 -98
  98. azure/ai/textanalytics/_generated/v3_1/_vendor.py +0 -45
  99. azure/ai/textanalytics/_generated/v3_1/aio/__init__.py +0 -20
  100. azure/ai/textanalytics/_generated/v3_1/aio/_configuration.py +0 -65
  101. azure/ai/textanalytics/_generated/v3_1/aio/_patch.py +0 -31
  102. azure/ai/textanalytics/_generated/v3_1/aio/_text_analytics_client.py +0 -95
  103. azure/ai/textanalytics/_generated/v3_1/aio/_vendor.py +0 -27
  104. azure/ai/textanalytics/_generated/v3_1/aio/operations/__init__.py +0 -18
  105. azure/ai/textanalytics/_generated/v3_1/aio/operations/_patch.py +0 -19
  106. azure/ai/textanalytics/_generated/v3_1/aio/operations/_text_analytics_client_operations.py +0 -1291
  107. azure/ai/textanalytics/_generated/v3_1/models/__init__.py +0 -205
  108. azure/ai/textanalytics/_generated/v3_1/models/_models_py3.py +0 -3976
  109. azure/ai/textanalytics/_generated/v3_1/models/_patch.py +0 -19
  110. azure/ai/textanalytics/_generated/v3_1/models/_text_analytics_client_enums.py +0 -367
  111. azure/ai/textanalytics/_generated/v3_1/operations/__init__.py +0 -18
  112. azure/ai/textanalytics/_generated/v3_1/operations/_patch.py +0 -19
  113. azure/ai/textanalytics/_generated/v3_1/operations/_text_analytics_client_operations.py +0 -1709
  114. azure/ai/textanalytics/_lro.py +0 -553
  115. azure/ai/textanalytics/_models.py +0 -3158
  116. azure/ai/textanalytics/_policies.py +0 -66
  117. azure/ai/textanalytics/_request_handlers.py +0 -104
  118. azure/ai/textanalytics/_response_handlers.py +0 -583
  119. azure/ai/textanalytics/_text_analytics_client.py +0 -2081
  120. azure/ai/textanalytics/_user_agent.py +0 -8
  121. azure/ai/textanalytics/_validate.py +0 -113
  122. azure/ai/textanalytics/aio/_base_client_async.py +0 -98
  123. azure/ai/textanalytics/aio/_lro_async.py +0 -503
  124. azure/ai/textanalytics/aio/_response_handlers_async.py +0 -94
  125. azure/ai/textanalytics/aio/_text_analytics_client_async.py +0 -2077
  126. azure_ai_textanalytics-5.3.0b2.dist-info/RECORD +0 -115
  127. {azure_ai_textanalytics-5.3.0b2.dist-info → azure_ai_textanalytics-6.0.0b1.dist-info/licenses}/LICENSE +0 -0
  128. {azure_ai_textanalytics-5.3.0b2.dist-info → azure_ai_textanalytics-6.0.0b1.dist-info}/top_level.txt +0 -0
@@ -1,30 +1,13 @@
+ # pylint: disable=line-too-long,useless-suppression,too-many-lines
+ # coding=utf-8
  # --------------------------------------------------------------------------
- #
  # Copyright (c) Microsoft Corporation. All rights reserved.
- #
- # The MIT License (MIT)
- #
- # Permission is hereby granted, free of charge, to any person obtaining a copy
- # of this software and associated documentation files (the ""Software""), to
- # deal in the Software without restriction, including without limitation the
- # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- # sell copies of the Software, and to permit persons to whom the Software is
- # furnished to do so, subject to the following conditions:
- #
- # The above copyright notice and this permission notice shall be included in
- # all copies or substantial portions of the Software.
- #
- # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
- # IN THE SOFTWARE.
- #
+ # Licensed under the MIT License. See License.txt in the project root for license information.
+ # Code generated by Microsoft (R) Python Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is regenerated.
  # --------------------------------------------------------------------------

- # pylint: skip-file
+ # pyright: reportUnnecessaryTypeIgnoreComment=false

  from base64 import b64decode, b64encode
  import calendar
@@ -37,34 +20,45 @@ import logging
  import re
  import sys
  import codecs
+ from typing import (
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ MutableMapping,
+ )
+
  try:
  from urllib import quote # type: ignore
  except ImportError:
- from urllib.parse import quote # type: ignore
+ from urllib.parse import quote
  import xml.etree.ElementTree as ET

- import isodate
+ import isodate # type: ignore
+ from typing_extensions import Self

- from typing import Dict, Any, cast, TYPE_CHECKING
+ from azure.core.exceptions import DeserializationError, SerializationError
+ from azure.core.serialization import NULL as CoreNull

- from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback
+ _BOM = codecs.BOM_UTF8.decode(encoding="utf-8")

- _BOM = codecs.BOM_UTF8.decode(encoding='utf-8')
+ JSON = MutableMapping[str, Any]

- if TYPE_CHECKING:
- from typing import Optional, Union, AnyStr, IO, Mapping

  class RawDeserializer:

  # Accept "text" because we're open minded people...
- JSON_REGEXP = re.compile(r'^(application|text)/([a-z+.]+\+)?json$')
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")

  # Name used in context
  CONTEXT_NAME = "deserialized_data"

  @classmethod
- def deserialize_from_text(cls, data, content_type=None):
- # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any
+ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
  """Decode data according to content-type.

  Accept a stream of data as well, but will be load at once in memory for now.
@@ -74,13 +68,15 @@ class RawDeserializer:
  :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
  :type data: str or bytes or IO
  :param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
  """
- if hasattr(data, 'read'):
+ if hasattr(data, "read"):
  # Assume a stream
  data = cast(IO, data).read()

  if isinstance(data, bytes):
- data_as_str = data.decode(encoding='utf-8-sig')
+ data_as_str = data.decode(encoding="utf-8-sig")
  else:
  # Explain to mypy the correct type.
  data_as_str = cast(str, data)
@@ -95,7 +91,7 @@ class RawDeserializer:
  try:
  return json.loads(data_as_str)
  except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
  elif "xml" in (content_type or []):
  try:

@@ -107,7 +103,7 @@ class RawDeserializer:
  pass

  return ET.fromstring(data_as_str) # nosec
- except ET.ParseError:
+ except ET.ParseError as err:
  # It might be because the server has an issue, and returned JSON with
  # content-type XML....
  # So let's try a JSON load, and if it's still broken
@@ -116,7 +112,8 @@ class RawDeserializer:
  try:
  return True, json.loads(data)
  except ValueError:
- return False, None # Don't care about this one
+ return False, None # Don't care about this one
+
  success, json_result = _json_attemp(data)
  if success:
  return json_result
@@ -125,22 +122,28 @@ class RawDeserializer:
  # The function hack is because Py2.7 messes up with exception
  # context otherwise.
  _LOGGER.critical("Wasn't XML not JSON, failing")
- raise_with_traceback(DeserializationError, "XML is invalid")
+ raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
  raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))

  @classmethod
- def deserialize_from_http_generics(cls, body_bytes, headers):
- # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any
+ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
  """Deserialize from HTTP response.

  Use bytes and headers to NOT use any requests/aiohttp or whatever
  specific implementation.
  Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
  """
  # Try to use content-type from headers if available
  content_type = None
- if 'content-type' in headers:
- content_type = headers['content-type'].split(";")[0].strip().lower()
+ if "content-type" in headers:
+ content_type = headers["content-type"].split(";")[0].strip().lower()
  # Ouch, this server did not declare what it sent...
  # Let's guess it's JSON...
  # Also, since Autorest was considering that an empty body was a valid JSON,
@@ -152,91 +155,44 @@ class RawDeserializer:
  return cls.deserialize_from_text(body_bytes, content_type)
  return None

- try:
- basestring # type: ignore
- unicode_str = unicode # type: ignore
- except NameError:
- basestring = str # type: ignore
- unicode_str = str # type: ignore

  _LOGGER = logging.getLogger(__name__)

  try:
- _long_type = long # type: ignore
+ _long_type = long # type: ignore
  except NameError:
  _long_type = int

- class UTC(datetime.tzinfo):
- """Time Zone info for handling UTC"""
-
- def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
- return datetime.timedelta(0)
-
- def tzname(self, dt):
- """Timestamp representation."""
- return "Z"
-
- def dst(self, dt):
- """No daylight saving for UTC."""
- return datetime.timedelta(hours=1)
-
- try:
- from datetime import timezone as _FixedOffset
- except ImportError: # Python 2.7
- class _FixedOffset(datetime.tzinfo): # type: ignore
- """Fixed offset in minutes east from UTC.
- Copy/pasted from Python doc
- :param datetime.timedelta offset: offset in timedelta format
- """
-
- def __init__(self, offset):
- self.__offset = offset
-
- def utcoffset(self, dt):
- return self.__offset
-
- def tzname(self, dt):
- return str(self.__offset.total_seconds()/3600)
-
- def __repr__(self):
- return "<FixedOffset {}>".format(self.tzname(None))
-
- def dst(self, dt):
- return datetime.timedelta(0)
-
- def __getinitargs__(self):
- return (self.__offset,)
-
- try:
- from datetime import timezone
- TZ_UTC = timezone.utc # type: ignore
- except ImportError:
- TZ_UTC = UTC() # type: ignore
+ TZ_UTC = datetime.timezone.utc

  _FLATTEN = re.compile(r"(?<!\\)\.")

- def attribute_transformer(key, attr_desc, value):
+
+ def attribute_transformer(key, attr_desc, value): # pylint: disable=unused-argument
  """A key transformer that returns the Python attribute.

  :param str key: The attribute name
  :param dict attr_desc: The attribute metadata
  :param object value: The value
  :returns: A key using attribute name
+ :rtype: str
  """
  return (key, value)

- def full_restapi_key_transformer(key, attr_desc, value):
+
+ def full_restapi_key_transformer(key, attr_desc, value): # pylint: disable=unused-argument
  """A key transformer that returns the full RestAPI key path.

- :param str _: The attribute name
+ :param str key: The attribute name
  :param dict attr_desc: The attribute metadata
  :param object value: The value
  :returns: A list of keys using RestAPI syntax.
+ :rtype: list
  """
- keys = _FLATTEN.split(attr_desc['key'])
+ keys = _FLATTEN.split(attr_desc["key"])
  return ([_decode_attribute_map_key(k) for k in keys], value)

+
  def last_restapi_key_transformer(key, attr_desc, value):
  """A key transformer that returns the last RestAPI key.

@@ -244,31 +200,40 @@ def last_restapi_key_transformer(key, attr_desc, value):
  :param dict attr_desc: The attribute metadata
  :param object value: The value
  :returns: The last RestAPI key.
+ :rtype: str
  """
  key, value = full_restapi_key_transformer(key, attr_desc, value)
  return (key[-1], value)

+
  def _create_xml_node(tag, prefix=None, ns=None):
- """Create a XML node."""
+ """Create a XML node.
+
+ :param str tag: The tag name
+ :param str prefix: The prefix
+ :param str ns: The namespace
+ :return: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
  if prefix and ns:
  ET.register_namespace(prefix, ns)
  if ns:
- return ET.Element("{"+ns+"}"+tag)
- else:
- return ET.Element(tag)
+ return ET.Element("{" + ns + "}" + tag)
+ return ET.Element(tag)
+

- class Model(object):
+ class Model:
  """Mixin for all client request body/response body models to support
  serialization and deserialization.
  """

- _subtype_map = {} # type: Dict[str, Dict[str, Any]]
- _attribute_map = {} # type: Dict[str, Dict[str, Any]]
- _validation = {} # type: Dict[str, Dict[str, Any]]
+ _subtype_map: dict[str, dict[str, Any]] = {}
+ _attribute_map: dict[str, dict[str, Any]] = {}
+ _validation: dict[str, dict[str, Any]] = {}

- def __init__(self, **kwargs):
- self.additional_properties = {}
- for k in kwargs:
+ def __init__(self, **kwargs: Any) -> None:
+ self.additional_properties: Optional[dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
  if k not in self._attribute_map:
  _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
  elif k in self._validation and self._validation[k].get("readonly", False):
@@ -276,27 +241,37 @@ class Model(object):
  else:
  setattr(self, k, kwargs[k])

- def __eq__(self, other):
- """Compare objects by comparing all attributes."""
+ def __eq__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
  if isinstance(other, self.__class__):
  return self.__dict__ == other.__dict__
  return False

- def __ne__(self, other):
- """Compare objects by comparing all attributes."""
+ def __ne__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
  return not self.__eq__(other)

- def __str__(self):
+ def __str__(self) -> str:
  return str(self.__dict__)

  @classmethod
- def enable_additional_properties_sending(cls):
- cls._attribute_map['additional_properties'] = {'key': '', 'type': '{object}'}
+ def enable_additional_properties_sending(cls) -> None:
+ cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}

  @classmethod
- def is_xml_model(cls):
+ def is_xml_model(cls) -> bool:
  try:
- cls._xml_map
+ cls._xml_map # type: ignore
  except AttributeError:
  return False
  return True
@@ -304,20 +279,19 @@ class Model(object):
  @classmethod
  def _create_xml_node(cls):
  """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
  """
  try:
- xml_map = cls._xml_map
+ xml_map = cls._xml_map # type: ignore
  except AttributeError:
  xml_map = {}

- return _create_xml_node(
- xml_map.get('name', cls.__name__),
- xml_map.get("prefix", None),
- xml_map.get("ns", None)
- )
+ return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))

- def serialize(self, keep_readonly=False, **kwargs):
- """Return the JSON that would be sent to azure from this model.
+ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+ """Return the JSON that would be sent to server from this model.

  This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.

@@ -328,10 +302,17 @@ class Model(object):
  :rtype: dict
  """
  serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )

- def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs):
- """Return a dict that can be JSONify using json.dump.
+ def as_dict(
+ self,
+ keep_readonly: bool = True,
+ key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer,
+ **kwargs: Any
+ ) -> JSON:
+ """Return a dict that can be serialized using json.dump.

  Advanced usage might optionally use a callback as parameter:

@@ -357,40 +338,49 @@ class Model(object):

  If you want XML serialization, you can pass the kwargs is_xml=True.

+ :param bool keep_readonly: If you want to serialize the readonly attributes
  :param function key_transformer: A key transformer function.
  :returns: A dict JSON compatible object
  :rtype: dict
  """
  serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )

  @classmethod
  def _infer_class_models(cls):
  try:
- str_models = cls.__module__.rsplit('.', 1)[0]
+ str_models = cls.__module__.rsplit(".", 1)[0]
  models = sys.modules[str_models]
  client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
  if cls.__name__ not in client_models:
  raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
  # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
  client_models = {cls.__name__: cls}
  return client_models

  @classmethod
- def deserialize(cls, data, content_type=None):
+ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
  """Parse a str using the RestAPI syntax and return a model.

  :param str data: A str using RestAPI structure. JSON by default.
  :param str content_type: JSON by default, set application/xml if XML.
  :returns: An instance of this model
- :raises: DeserializationError if something went wrong
+ :raises DeserializationError: if something went wrong
+ :rtype: Self
  """
  deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore

  @classmethod
- def from_dict(cls, data, key_extractors=None, content_type=None):
+ def from_dict(
+ cls,
+ data: Any,
+ key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None,
+ content_type: Optional[str] = None,
+ ) -> Self:
  """Parse a dict using given key extractor return a model.

  By default consider key
@@ -398,45 +388,51 @@ class Model(object):
  and last_rest_key_case_insensitive_extractor)

  :param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
  :param str content_type: JSON by default, set application/xml if XML.
  :returns: An instance of this model
- :raises: DeserializationError if something went wrong
+ :raises DeserializationError: if something went wrong
+ :rtype: Self
  """
  deserializer = Deserializer(cls._infer_class_models())
- deserializer.key_extractors = [
- attribute_key_case_insensitive_extractor,
- rest_key_case_insensitive_extractor,
- last_rest_key_case_insensitive_extractor
- ] if key_extractors is None else key_extractors
- return deserializer(cls.__name__, data, content_type=content_type)
+ deserializer.key_extractors = ( # type: ignore
+ [ # type: ignore
+ attribute_key_case_insensitive_extractor,
+ rest_key_case_insensitive_extractor,
+ last_rest_key_case_insensitive_extractor,
+ ]
+ if key_extractors is None
+ else key_extractors
+ )
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore

  @classmethod
  def _flatten_subtype(cls, key, objects):
- if '_subtype_map' not in cls.__dict__:
+ if "_subtype_map" not in cls.__dict__:
  return {}
  result = dict(cls._subtype_map[key])
  for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access
  return result

  @classmethod
  def _classify(cls, response, objects):
  """Check the class _subtype_map for any child classes.
  We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
  """
- for subtype_key in cls.__dict__.get('_subtype_map', {}).keys():
+ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
  subtype_value = None

  if not isinstance(response, ET.Element):
  rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
  else:
- subtype_value = xml_key_extractor(
- subtype_key,
- cls._attribute_map[subtype_key],
- response
- )
+ subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
  if subtype_value:
  # Try to match base class. Can be class name only
  # (bug to fix in Autorest to support x-ms-discriminator-name)
@@ -444,7 +440,7 @@ class Model(object):
  return cls
  flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
  try:
- return objects[flatten_mapping_type[subtype_value]]
+ return objects[flatten_mapping_type[subtype_value]] # type: ignore
  except KeyError:
  _LOGGER.warning(
  "Subtype value %s has no mapping, use base class %s.",
@@ -453,11 +449,7 @@ class Model(object):
  )
  break
  else:
- _LOGGER.warning(
- "Discriminator %s is absent or null, use base class %s.",
- subtype_key,
- cls.__name__
- )
+ _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
  break
  return cls

@@ -468,29 +460,42 @@ class Model(object):
  :returns: A list of RestAPI part
  :rtype: list
  """
- rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]['key'])
+ rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
  return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]


  def _decode_attribute_map_key(key):
  """This decode a key in an _attribute_map to the actual key we want to look at
- inside the received data.
+ inside the received data.

- :param str key: A key string from the generated code
+ :param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
  """
- return key.replace('\\.', '.')
+ return key.replace("\\.", ".")


- class Serializer(object):
+ class Serializer: # pylint: disable=too-many-public-methods
  """Request object model serializer."""

- basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'}
-
- _xml_basic_types_serializers = {'bool': lambda x:str(x).lower()}
- days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu",
- 4: "Fri", 5: "Sat", 6: "Sun"}
- months = {1: "Jan", 2: "Feb", 3: "Mar", 4: "Apr", 5: "May", 6: "Jun",
- 7: "Jul", 8: "Aug", 9: "Sep", 10: "Oct", 11: "Nov", 12: "Dec"}
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+ _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+ days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+ months = {
+ 1: "Jan",
+ 2: "Feb",
+ 3: "Mar",
+ 4: "Apr",
+ 5: "May",
+ 6: "Jun",
+ 7: "Jul",
+ 8: "Aug",
+ 9: "Sep",
+ 10: "Oct",
+ 11: "Nov",
+ 12: "Dec",
+ }
  validation = {
  "min_length": lambda x, y: len(x) < y,
  "max_length": lambda x, y: len(x) > y,
@@ -502,36 +507,39 @@ class Serializer(object):
  "max_items": lambda x, y: len(x) > y,
  "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
  "unique": lambda x, y: len(x) != len(set(x)),
- "multiple": lambda x, y: x % y != 0
- }
+ "multiple": lambda x, y: x % y != 0,
+ }

- def __init__(self, classes=None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
  self.serialize_type = {
- 'iso-8601': Serializer.serialize_iso,
- 'rfc-1123': Serializer.serialize_rfc,
- 'unix-time': Serializer.serialize_unix,
- 'duration': Serializer.serialize_duration,
- 'date': Serializer.serialize_date,
- 'time': Serializer.serialize_time,
- 'decimal': Serializer.serialize_decimal,
- 'long': Serializer.serialize_long,
- 'bytearray': Serializer.serialize_bytearray,
- 'base64': Serializer.serialize_base64,
- 'object': self.serialize_object,
- '[]': self.serialize_iter,
- '{}': self.serialize_dict
- }
- self.dependencies = dict(classes) if classes else {}
+ "iso-8601": Serializer.serialize_iso,
+ "rfc-1123": Serializer.serialize_rfc,
+ "unix-time": Serializer.serialize_unix,
+ "duration": Serializer.serialize_duration,
+ "date": Serializer.serialize_date,
+ "time": Serializer.serialize_time,
+ "decimal": Serializer.serialize_decimal,
+ "long": Serializer.serialize_long,
+ "bytearray": Serializer.serialize_bytearray,
+ "base64": Serializer.serialize_base64,
+ "object": self.serialize_object,
+ "[]": self.serialize_iter,
+ "{}": self.serialize_dict,
+ }
+ self.dependencies: dict[str, type] = dict(classes) if classes else {}
  self.key_transformer = full_restapi_key_transformer
  self.client_side_validation = True

- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
  """Serialize data into a string according to type.

- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
  :param str data_type: The type to be serialized from.
  :rtype: str, dict
- :raises: SerializationError if serialization fails.
+ :raises SerializationError: if serialization fails.
+ :returns: The serialized data.
  """
  key_transformer = kwargs.get("key_transformer", self.key_transformer)
  keep_readonly = kwargs.get("keep_readonly", False)
@@ -542,14 +550,12 @@ class Serializer(object):
  class_name = target_obj.__class__.__name__

  if data_type:
- return self.serialize_data(
- target_obj, data_type, **kwargs)
+ return self.serialize_data(target_obj, data_type, **kwargs)

  if not hasattr(target_obj, "_attribute_map"):
  data_type = type(target_obj).__name__
  if data_type in self.basic_types.values():
- return self.serialize_data(
- target_obj, data_type, **kwargs)
+ return self.serialize_data(target_obj, data_type, **kwargs)

  # Force "is_xml" kwargs if we detect a XML model
  try:
@@ -559,101 +565,97 @@ class Serializer(object):

  serialized = {}
  if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
  try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
  for attr, attr_desc in attributes.items():
  attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get('readonly', False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
  continue

- if attr_name == "additional_properties" and attr_desc["key"] == '':
+ if attr_name == "additional_properties" and attr_desc["key"] == "":
  if target_obj.additional_properties is not None:
- serialized.update(target_obj.additional_properties)
+ serialized |= target_obj.additional_properties
  continue
  try:

  orig_attr = getattr(target_obj, attr)
  if is_xml_model_serialization:
- pass # Don't provide "transformer" for XML for now. Keep "orig_attr"
- else: # JSON
+ pass # Don't provide "transformer" for XML for now. Keep "orig_attr"
+ else: # JSON
  keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
  keys = keys if isinstance(keys, list) else [keys]

-
  kwargs["serialization_ctxt"] = attr_desc
- new_attr = self.serialize_data(orig_attr, attr_desc['type'], **kwargs)
-
+ new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)

  if is_xml_model_serialization:
- xml_desc = attr_desc.get('xml', {})
- xml_name = xml_desc.get('name', attr_desc['key'])
- xml_prefix = xml_desc.get('prefix', None)
- xml_ns = xml_desc.get('ns', None)
+ xml_desc = attr_desc.get("xml", {})
+ xml_name = xml_desc.get("name", attr_desc["key"])
+ xml_prefix = xml_desc.get("prefix", None)
+ xml_ns = xml_desc.get("ns", None)
  if xml_desc.get("attr", False):
  if xml_ns:
  ET.register_namespace(xml_prefix, xml_ns)
- xml_name = "{}{}".format(xml_ns, xml_name)
- serialized.set(xml_name, new_attr)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ serialized.set(xml_name, new_attr) # type: ignore
  continue
  if xml_desc.get("text", False):
- serialized.text = new_attr
+ serialized.text = new_attr # type: ignore
  continue
  if isinstance(new_attr, list):
- serialized.extend(new_attr)
+ serialized.extend(new_attr) # type: ignore
  elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
- if 'name' not in getattr(orig_attr, '_xml_map', {}):
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
+ if "name" not in getattr(orig_attr, "_xml_map", {}):
  splitted_tag = new_attr.tag.split("}")
- if len(splitted_tag) == 2: # Namespace
+ if len(splitted_tag) == 2: # Namespace
  new_attr.tag = "}".join([splitted_tag[0], xml_name])
  else:
  new_attr.tag = xml_name
- serialized.append(new_attr)
+ serialized.append(new_attr) # type: ignore
  else: # That's a basic type
  # Integrate namespace if necessary
- local_node = _create_xml_node(
- xml_name,
- xml_prefix,
- xml_ns
- )
- local_node.text = unicode_str(new_attr)
- serialized.append(local_node)
- else: # JSON
- for k in reversed(keys):
- unflattened = {k: new_attr}
- new_attr = unflattened
+ local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
+ local_node.text = str(new_attr)
+ serialized.append(local_node) # type: ignore
+ else: # JSON
+ for k in reversed(keys): # type: ignore
+ new_attr = {k: new_attr}

  _new_attr = new_attr
  _serialized = serialized
- for k in keys:
+ for k in keys: # type: ignore
  if k not in _serialized:
- _serialized.update(_new_attr)
- _new_attr = _new_attr[k]
+ _serialized.update(_new_attr) # type: ignore
+ _new_attr = _new_attr[k] # type: ignore
  _serialized = _serialized[k]
- except ValueError:
- continue
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise

  except (AttributeError, KeyError, TypeError) as err:
- msg = "Attribute {} in object {} cannot be serialized.\n{}".format(
- attr_name, class_name, str(target_obj))
- raise_with_traceback(SerializationError, msg, err)
- else:
- return serialized
+ msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
+ raise SerializationError(msg) from err
+ return serialized

  def body(self, data, data_type, **kwargs):
  """Serialize data intended for a request body.

- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
  :param str data_type: The type to be serialized from.
  :rtype: dict
- :raises: SerializationError if serialization fails.
- :raises: ValueError if data is None
+ :raises SerializationError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized request body
  """

  # Just in case this is a dict
- internal_data_type = data_type.strip('[]{}')
- internal_data_type = self.dependencies.get(internal_data_type, None)
+ internal_data_type_str = data_type.strip("[]{}")
+ internal_data_type = self.dependencies.get(internal_data_type_str, None)
  try:
  is_xml_model_serialization = kwargs["is_xml"]
  except KeyError:
@@ -668,124 +670,119 @@ class Serializer(object):
  # We're not able to deal with additional properties for now.
  deserializer.additional_properties_detection = False
  if is_xml_model_serialization:
- deserializer.key_extractors = [
+ deserializer.key_extractors = [ # type: ignore
  attribute_key_case_insensitive_extractor,
  ]
  else:
  deserializer.key_extractors = [
  rest_key_case_insensitive_extractor,
  attribute_key_case_insensitive_extractor,
- last_rest_key_case_insensitive_extractor
+ last_rest_key_case_insensitive_extractor,
  ]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
  except DeserializationError as err:
- raise_with_traceback(
- SerializationError, "Unable to build a model: "+str(err), err)
+ raise SerializationError("Unable to build a model: " + str(err)) from err

  return self._serialize(data, data_type, **kwargs)

  def url(self, name, data, data_type, **kwargs):
  """Serialize data intended for a URL path.

- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
  :param str data_type: The type to be serialized from.
  :rtype: str
- :raises: TypeError if serialization fails.
- :raises: ValueError if data is None
+ :returns: The serialized URL path
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
  """
  try:
  output = self.serialize_data(data, data_type, **kwargs)
- if data_type == 'bool':
+ if data_type == "bool":
  output = json.dumps(output)

- if kwargs.get('skip_quote') is True:
+ if kwargs.get("skip_quote") is True:
  output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
  else:
- output = quote(str(output), safe='')
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output

  def query(self, name, data, data_type, **kwargs):
  """Serialize data intended for a URL query.

- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
  :param str data_type: The type to be serialized from.
- :rtype: str
- :raises: TypeError if serialization fails.
- :raises: ValueError if data is None
+ :rtype: str, list
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized query parameter
  """
  try:
  # Treat the list aside, since we don't want to encode the div separator
  if data_type.startswith("["):
  internal_data_type = data_type[1:-1]
- data = [
- self.serialize_data(d, internal_data_type, **kwargs) if d is not None else ""
- for d
- in data
- ]
- if not kwargs.get('skip_quote', False):
- data = [
- quote(str(d), safe='')
- for d
- in data
- ]
- return str(self.serialize_iter(data, internal_data_type, **kwargs))
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)

  # Not a list, regular serialization
  output = self.serialize_data(data, data_type, **kwargs)
- if data_type == 'bool':
+ if data_type == "bool":
  output = json.dumps(output)
- if kwargs.get('skip_quote') is True:
+ if kwargs.get("skip_quote") is True:
  output = str(output)
  else:
- output = quote(str(output), safe='')
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)

  def header(self, name, data, data_type, **kwargs):
  """Serialize data intended for a request header.

- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
  :param str data_type: The type to be serialized from.
  :rtype: str
- :raises: TypeError if serialization fails.
- :raises: ValueError if data is None
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized header
  """
  try:
- if data_type in ['[str]']:
+ if data_type in ["[str]"]:
  data = ["" if d is None else d for d in data]

  output = self.serialize_data(data, data_type, **kwargs)
- if data_type == 'bool':
+ if data_type == "bool":
  output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)

  def serialize_data(self, data, data_type, **kwargs):
  """Serialize generic data according to supplied data type.

- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
  :param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
- :raises: AttributeError if required data is None.
- :raises: ValueError if data is None
- :raises: SerializationError if serialization fails.
+ :raises AttributeError: if required data is None.
+ :raises ValueError: if data is None
+ :raises SerializationError: if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
  """
  if data is None:
  raise ValueError("No value for given attribute")

  try:
+ if data is CoreNull:
+ return None
  if data_type in self.basic_types.values():
  return self.serialize_basic(data, data_type, **kwargs)

- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
  return self.serialize_type[data_type](data, **kwargs)

  # If dependencies is empty, try with current data class
@@ -796,18 +793,15 @@ class Serializer(object):
  iter_type = data_type[0] + data_type[-1]
  if iter_type in self.serialize_type:
- return self.serialize_type[iter_type](
- data, data_type[1:-1], **kwargs)
+ return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)

  except (ValueError, TypeError) as err:
  msg = "Unable to serialize value: {!r} as type: {!r}."
- raise_with_traceback(
- SerializationError, msg.format(data, data_type), err)
- else:
- return self._serialize(data, **kwargs)
+ raise SerializationError(msg.format(data, data_type)) from err
+ return self._serialize(data, **kwargs)

  @classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
  custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
  if custom_serializer:
  return custom_serializer
  if kwargs.get("is_xml", False):
@@ -823,23 +817,26 @@ class Serializer(object):
  - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
  - is_xml bool : If set, use xml_basic_types_serializers

- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
  :param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
  """
  custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
  if custom_serializer:
  return custom_serializer(data)
- if data_type == 'str':
+ if data_type == "str":
  return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used

  @classmethod
  def serialize_unicode(cls, data):
  """Special handling for serializing unicode strings in Py2.
  Encode to UTF-8 if unicode, otherwise handle as a str.

- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
  :rtype: str
+ :return: serialized object
  """
  try: # If I received an enum, return its value
  return data.value
@@ -847,14 +844,13 @@ class Serializer(object):
  pass

  try:
- if isinstance(data, unicode):
+ if isinstance(data, unicode): # type: ignore
  # Don't change it, JSON and XML ElementTree are totally able
  # to serialize correctly u'' strings
  return data
  except NameError:
  return str(data)
- else:
- return str(data)
+ return str(data)

  def serialize_iter(self, data, iter_type, div=None, **kwargs):
  """Serialize iterable.
@@ -864,13 +860,13 @@ class Serializer(object):
  serialization_ctxt['type'] should be same as data_type.
  - is_xml bool : If set, serialize as XML

- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
  :param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
  :param str div: If set, this str will be used to combine the elements
  in the iterable into a combined string. Default is 'None'.
+ Defaults to False.
  :rtype: list, str
+ :return: serialized iterable
  """
  if isinstance(data, str):
  raise SerializationError("Refuse str type as a valid iter type.")
@@ -882,29 +878,30 @@ class Serializer(object):
  for d in data:
  try:
  serialized.append(self.serialize_data(d, iter_type, **kwargs))
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
  serialized.append(None)

+ if kwargs.get("do_quote", False):
+ serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
  if div:
- serialized = ['' if s is None else str(s) for s in serialized]
+ serialized = ["" if s is None else str(s) for s in serialized]
  serialized = div.join(serialized)

- if 'xml' in serialization_ctxt or is_xml:
+ if "xml" in serialization_ctxt or is_xml:
  # XML serialization is more complicated
- xml_desc = serialization_ctxt.get('xml', {})
- xml_name = xml_desc.get('name')
+ xml_desc = serialization_ctxt.get("xml", {})
+ xml_name = xml_desc.get("name")
  if not xml_name:
- xml_name = serialization_ctxt['key']
+ xml_name = serialization_ctxt["key"]

  # Create a wrap node if necessary (use the fact that Element and list have "append")
  is_wrapped = xml_desc.get("wrapped", False)
  node_name = xml_desc.get("itemsName", xml_name)
  if is_wrapped:
- final_result = _create_xml_node(
- xml_name,
- xml_desc.get('prefix', None),
- xml_desc.get('ns', None)
- )
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
  else:
  final_result = []
  # All list elements to "local_node"
@@ -912,11 +909,7 @@ class Serializer(object):
912
909
  if isinstance(el, ET.Element):
913
910
  el_node = el
914
911
  else:
915
- el_node = _create_xml_node(
916
- node_name,
917
- xml_desc.get('prefix', None),
918
- xml_desc.get('ns', None)
919
- )
912
+ el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
920
913
  if el is not None: # Otherwise it writes "None" :-p
921
914
  el_node.text = str(el)
922
915
  final_result.append(el_node)
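
For context, a minimal sketch of how the new `do_quote` keyword interacts with `div` in `serialize_iter` (import path assumed from the `_utils/serialization.py` layout in the file list; this serializer is an internal vendored helper, not public API):

```python
# Illustrative sketch, not a supported public API: the vendored Serializer joins an
# iterable into one string with `div`, optionally percent-quoting each element first
# via the new `do_quote` keyword (quote(str(s), safe="") as in the hunk above).
from azure.ai.textanalytics._utils.serialization import Serializer  # path assumed

serializer = Serializer()

print(serializer.serialize_iter(["en us", "fr/FR"], "str", div=",", do_quote=True))
# -> "en%20us,fr%2FFR"

# Without do_quote the elements are joined as-is.
print(serializer.serialize_iter(["en us", "fr/FR"], "str", div=","))
# -> "en us,fr/FR"
```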
@@ -928,36 +921,32 @@ class Serializer(object):
928
921
 
929
922
  :param dict attr: Object to be serialized.
930
923
  :param str dict_type: Type of object in the dictionary.
931
- :param bool required: Whether the objects in the dictionary must
932
- not be None or empty.
933
924
  :rtype: dict
925
+ :return: serialized dictionary
934
926
  """
935
927
  serialization_ctxt = kwargs.get("serialization_ctxt", {})
936
928
  serialized = {}
937
929
  for key, value in attr.items():
938
930
  try:
939
- serialized[self.serialize_unicode(key)] = self.serialize_data(
940
- value, dict_type, **kwargs)
941
- except ValueError:
931
+ serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
932
+ except ValueError as err:
933
+ if isinstance(err, SerializationError):
934
+ raise
942
935
  serialized[self.serialize_unicode(key)] = None
943
936
 
944
- if 'xml' in serialization_ctxt:
937
+ if "xml" in serialization_ctxt:
945
938
  # XML serialization is more complicated
946
- xml_desc = serialization_ctxt['xml']
947
- xml_name = xml_desc['name']
939
+ xml_desc = serialization_ctxt["xml"]
940
+ xml_name = xml_desc["name"]
948
941
 
949
- final_result = _create_xml_node(
950
- xml_name,
951
- xml_desc.get('prefix', None),
952
- xml_desc.get('ns', None)
953
- )
942
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
954
943
  for key, value in serialized.items():
955
944
  ET.SubElement(final_result, key).text = value
956
945
  return final_result
957
946
 
958
947
  return serialized
959
948
 
960
- def serialize_object(self, attr, **kwargs):
949
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
961
950
  """Serialize a generic object.
962
951
  This will be handled as a dictionary. If object passed in is not
963
952
  a basic type (str, int, float, dict, list) it will simply be
@@ -965,6 +954,7 @@ class Serializer(object):
965
954
 
966
955
  :param dict attr: Object to be serialized.
967
956
  :rtype: dict or str
957
+ :return: serialized object
968
958
  """
969
959
  if attr is None:
970
960
  return None
@@ -975,7 +965,7 @@ class Serializer(object):
975
965
  return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
976
966
  if obj_type is _long_type:
977
967
  return self.serialize_long(attr)
978
- if obj_type is unicode_str:
968
+ if obj_type is str:
979
969
  return self.serialize_unicode(attr)
980
970
  if obj_type is datetime.datetime:
981
971
  return self.serialize_iso(attr)
@@ -989,15 +979,14 @@ class Serializer(object):
989
979
  return self.serialize_decimal(attr)
990
980
 
991
981
  # If it's a model or I know this dependency, serialize as a Model
992
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
982
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
993
983
  return self._serialize(attr)
994
984
 
995
985
  if obj_type == dict:
996
986
  serialized = {}
997
987
  for key, value in attr.items():
998
988
  try:
999
- serialized[self.serialize_unicode(key)] = self.serialize_object(
1000
- value, **kwargs)
989
+ serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
1001
990
  except ValueError:
1002
991
  serialized[self.serialize_unicode(key)] = None
1003
992
  return serialized
@@ -1006,8 +995,7 @@ class Serializer(object):
1006
995
  serialized = []
1007
996
  for obj in attr:
1008
997
  try:
1009
- serialized.append(self.serialize_object(
1010
- obj, **kwargs))
998
+ serialized.append(self.serialize_object(obj, **kwargs))
1011
999
  except ValueError:
1012
1000
  pass
1013
1001
  return serialized
@@ -1020,58 +1008,63 @@ class Serializer(object):
1020
1008
  except AttributeError:
1021
1009
  result = attr
1022
1010
  try:
1023
- enum_obj(result)
1011
+ enum_obj(result) # type: ignore
1024
1012
  return result
1025
- except ValueError:
1026
- for enum_value in enum_obj:
1013
+ except ValueError as exc:
1014
+ for enum_value in enum_obj: # type: ignore
1027
1015
  if enum_value.value.lower() == str(attr).lower():
1028
1016
  return enum_value.value
1029
1017
  error = "{!r} is not valid value for enum {!r}"
1030
- raise SerializationError(error.format(attr, enum_obj))
1018
+ raise SerializationError(error.format(attr, enum_obj)) from exc
1031
1019
 
1032
1020
  @staticmethod
1033
- def serialize_bytearray(attr, **kwargs):
1021
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
1034
1022
  """Serialize bytearray into base-64 string.
1035
1023
 
1036
- :param attr: Object to be serialized.
1024
+ :param str attr: Object to be serialized.
1037
1025
  :rtype: str
1026
+ :return: serialized base64
1038
1027
  """
1039
1028
  return b64encode(attr).decode()
1040
1029
 
1041
1030
  @staticmethod
1042
- def serialize_base64(attr, **kwargs):
1031
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
1043
1032
  """Serialize str into base-64 string.
1044
1033
 
1045
- :param attr: Object to be serialized.
1034
+ :param str attr: Object to be serialized.
1046
1035
  :rtype: str
1036
+ :return: serialized base64
1047
1037
  """
1048
- encoded = b64encode(attr).decode('ascii')
1049
- return encoded.strip('=').replace('+', '-').replace('/', '_')
1038
+ encoded = b64encode(attr).decode("ascii")
1039
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
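
A small hedged example of the padding-free, URL-safe encoding produced by `serialize_base64` (a static method on the vendored Serializer; import path assumed):

```python
# Sketch only: Swagger 'base64' format values are standard base64 with '=' padding
# stripped and '+'/'/' swapped for '-'/'_'.
from azure.ai.textanalytics._utils.serialization import Serializer  # path assumed

print(Serializer.serialize_base64(b"text?analytics"))
# -> "dGV4dD9hbmFseXRpY3M" (no '=' padding, URL-safe alphabet)
```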
1050
1040
 
1051
1041
  @staticmethod
1052
- def serialize_decimal(attr, **kwargs):
1042
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
1053
1043
  """Serialize Decimal object to float.
1054
1044
 
1055
- :param attr: Object to be serialized.
1045
+ :param decimal attr: Object to be serialized.
1056
1046
  :rtype: float
1047
+ :return: serialized decimal
1057
1048
  """
1058
1049
  return float(attr)
1059
1050
 
1060
1051
  @staticmethod
1061
- def serialize_long(attr, **kwargs):
1052
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
1062
1053
  """Serialize long (Py2) or int (Py3).
1063
1054
 
1064
- :param attr: Object to be serialized.
1055
+ :param int attr: Object to be serialized.
1065
1056
  :rtype: int/long
1057
+ :return: serialized long
1066
1058
  """
1067
1059
  return _long_type(attr)
1068
1060
 
1069
1061
  @staticmethod
1070
- def serialize_date(attr, **kwargs):
1062
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
1071
1063
  """Serialize Date object into ISO-8601 formatted string.
1072
1064
 
1073
1065
  :param Date attr: Object to be serialized.
1074
1066
  :rtype: str
1067
+ :return: serialized date
1075
1068
  """
1076
1069
  if isinstance(attr, str):
1077
1070
  attr = isodate.parse_date(attr)
@@ -1079,11 +1072,12 @@ class Serializer(object):
1079
1072
  return t
1080
1073
 
1081
1074
  @staticmethod
1082
- def serialize_time(attr, **kwargs):
1075
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
1083
1076
  """Serialize Time object into ISO-8601 formatted string.
1084
1077
 
1085
1078
  :param datetime.time attr: Object to be serialized.
1086
1079
  :rtype: str
1080
+ :return: serialized time
1087
1081
  """
1088
1082
  if isinstance(attr, str):
1089
1083
  attr = isodate.parse_time(attr)
@@ -1093,94 +1087,102 @@ class Serializer(object):
1093
1087
  return t
1094
1088
 
1095
1089
  @staticmethod
1096
- def serialize_duration(attr, **kwargs):
1090
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
1097
1091
  """Serialize TimeDelta object into ISO-8601 formatted string.
1098
1092
 
1099
1093
  :param TimeDelta attr: Object to be serialized.
1100
1094
  :rtype: str
1095
+ :return: serialized duration
1101
1096
  """
1102
1097
  if isinstance(attr, str):
1103
1098
  attr = isodate.parse_duration(attr)
1104
1099
  return isodate.duration_isoformat(attr)
1105
1100
 
1106
1101
  @staticmethod
1107
- def serialize_rfc(attr, **kwargs):
1102
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
1108
1103
  """Serialize Datetime object into RFC-1123 formatted string.
1109
1104
 
1110
1105
  :param Datetime attr: Object to be serialized.
1111
1106
  :rtype: str
1112
- :raises: TypeError if format invalid.
1107
+ :raises TypeError: if format invalid.
1108
+ :return: serialized rfc
1113
1109
  """
1114
1110
  try:
1115
1111
  if not attr.tzinfo:
1116
- _LOGGER.warning(
1117
- "Datetime with no tzinfo will be considered UTC.")
1112
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
1118
1113
  utc = attr.utctimetuple()
1119
- except AttributeError:
1120
- raise TypeError("RFC1123 object must be valid Datetime object.")
1114
+ except AttributeError as exc:
1115
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
1121
1116
 
1122
1117
  return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
1123
- Serializer.days[utc.tm_wday], utc.tm_mday,
1124
- Serializer.months[utc.tm_mon], utc.tm_year,
1125
- utc.tm_hour, utc.tm_min, utc.tm_sec)
1118
+ Serializer.days[utc.tm_wday],
1119
+ utc.tm_mday,
1120
+ Serializer.months[utc.tm_mon],
1121
+ utc.tm_year,
1122
+ utc.tm_hour,
1123
+ utc.tm_min,
1124
+ utc.tm_sec,
1125
+ )
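
For reference, a hedged sketch of the RFC-1123 formatter above (static method on the vendored Serializer, import path assumed):

```python
# Sketch only: naive datetimes are treated as UTC (the code just logs a warning).
import datetime
from azure.ai.textanalytics._utils.serialization import Serializer  # path assumed

dt = datetime.datetime(2023, 12, 25, 14, 30, 0, tzinfo=datetime.timezone.utc)
print(Serializer.serialize_rfc(dt))  # -> "Mon, 25 Dec 2023 14:30:00 GMT"
```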
1126
1126
 
1127
1127
  @staticmethod
1128
- def serialize_iso(attr, **kwargs):
1128
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
1129
1129
  """Serialize Datetime object into ISO-8601 formatted string.
1130
1130
 
1131
1131
  :param Datetime attr: Object to be serialized.
1132
1132
  :rtype: str
1133
- :raises: SerializationError if format invalid.
1133
+ :raises SerializationError: if format invalid.
1134
+ :return: serialized iso
1134
1135
  """
1135
1136
  if isinstance(attr, str):
1136
1137
  attr = isodate.parse_datetime(attr)
1137
1138
  try:
1138
1139
  if not attr.tzinfo:
1139
- _LOGGER.warning(
1140
- "Datetime with no tzinfo will be considered UTC.")
1140
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
1141
1141
  utc = attr.utctimetuple()
1142
1142
  if utc.tm_year > 9999 or utc.tm_year < 1:
1143
1143
  raise OverflowError("Hit max or min date")
1144
1144
 
1145
- microseconds = str(attr.microsecond).rjust(6,'0').rstrip('0').ljust(3, '0')
1145
+ microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
1146
1146
  if microseconds:
1147
- microseconds = '.'+microseconds
1147
+ microseconds = "." + microseconds
1148
1148
  date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
1149
- utc.tm_year, utc.tm_mon, utc.tm_mday,
1150
- utc.tm_hour, utc.tm_min, utc.tm_sec)
1151
- return date + microseconds + 'Z'
1149
+ utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
1150
+ )
1151
+ return date + microseconds + "Z"
1152
1152
  except (ValueError, OverflowError) as err:
1153
1153
  msg = "Unable to serialize datetime object."
1154
- raise_with_traceback(SerializationError, msg, err)
1154
+ raise SerializationError(msg) from err
1155
1155
  except AttributeError as err:
1156
1156
  msg = "ISO-8601 object must be valid Datetime object."
1157
- raise_with_traceback(TypeError, msg, err)
1157
+ raise TypeError(msg) from err
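
And the ISO-8601 counterpart shown above, again as a hedged sketch:

```python
# Sketch only: the serializer always emits a trailing "Z" and keeps the fractional
# seconds when microseconds are present.
import datetime
from azure.ai.textanalytics._utils.serialization import Serializer  # path assumed

dt = datetime.datetime(2024, 1, 2, 3, 4, 5, 123456, tzinfo=datetime.timezone.utc)
print(Serializer.serialize_iso(dt))  # -> "2024-01-02T03:04:05.123456Z"
```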
1158
1158
 
1159
1159
  @staticmethod
1160
- def serialize_unix(attr, **kwargs):
1160
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
1161
1161
  """Serialize Datetime object into IntTime format.
1162
1162
  This is represented as seconds.
1163
1163
 
1164
1164
  :param Datetime attr: Object to be serialized.
1165
1165
  :rtype: int
1166
- :raises: SerializationError if format invalid
1166
+ :raises SerializationError: if format invalid
1167
+ :return: serialied unix
1167
1168
  """
1168
1169
  if isinstance(attr, int):
1169
1170
  return attr
1170
1171
  try:
1171
1172
  if not attr.tzinfo:
1172
- _LOGGER.warning(
1173
- "Datetime with no tzinfo will be considered UTC.")
1173
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
1174
1174
  return int(calendar.timegm(attr.utctimetuple()))
1175
- except AttributeError:
1176
- raise TypeError("Unix time object must be valid Datetime object.")
1175
+ except AttributeError as exc:
1176
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
1177
+
1177
1178
 
1178
- def rest_key_extractor(attr, attr_desc, data):
1179
- key = attr_desc['key']
1179
+ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
1180
+ key = attr_desc["key"]
1180
1181
  working_data = data
1181
1182
 
1182
- while '.' in key:
1183
- dict_keys = _FLATTEN.split(key)
1183
+ while "." in key:
1184
+ # Need the cast, as for some reasons "split" is typed as list[str | Any]
1185
+ dict_keys = cast(list[str], _FLATTEN.split(key))
1184
1186
  if len(dict_keys) == 1:
1185
1187
  key = _decode_attribute_map_key(dict_keys[0])
1186
1188
  break
@@ -1189,17 +1191,19 @@ def rest_key_extractor(attr, attr_desc, data):
1189
1191
  if working_data is None:
1190
1192
  # If at any point while following flatten JSON path see None, it means
1191
1193
  # that all properties under are None as well
1192
- # https://github.com/Azure/msrest-for-python/issues/197
1193
1194
  return None
1194
- key = '.'.join(dict_keys[1:])
1195
+ key = ".".join(dict_keys[1:])
1195
1196
 
1196
1197
  return working_data.get(key)
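
A short sketch of the flattened-key lookup performed by `rest_key_extractor` (a module-level helper in the same internal file; the attribute map below is hypothetical):

```python
# Sketch only: a dotted attribute-map key walks nested JSON one level at a time,
# and any None encountered along the path short-circuits to None.
from azure.ai.textanalytics._utils.serialization import rest_key_extractor  # path assumed

attr_desc = {"key": "properties.provisioningState", "type": "str"}  # hypothetical mapping
print(rest_key_extractor("provisioning_state", attr_desc,
                         {"properties": {"provisioningState": "Succeeded"}}))  # -> "Succeeded"
print(rest_key_extractor("provisioning_state", attr_desc,
                         {"properties": None}))  # -> None
```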
1197
1198
 
1198
- def rest_key_case_insensitive_extractor(attr, attr_desc, data):
1199
- key = attr_desc['key']
1199
+
1200
+ def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
1201
+ attr, attr_desc, data
1202
+ ):
1203
+ key = attr_desc["key"]
1200
1204
  working_data = data
1201
1205
 
1202
- while '.' in key:
1206
+ while "." in key:
1203
1207
  dict_keys = _FLATTEN.split(key)
1204
1208
  if len(dict_keys) == 1:
1205
1209
  key = _decode_attribute_map_key(dict_keys[0])
@@ -1209,32 +1213,46 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
1209
1213
  if working_data is None:
1210
1214
  # If at any point while following flatten JSON path see None, it means
1211
1215
  # that all properties under are None as well
1212
- # https://github.com/Azure/msrest-for-python/issues/197
1213
1216
  return None
1214
- key = '.'.join(dict_keys[1:])
1217
+ key = ".".join(dict_keys[1:])
1215
1218
 
1216
1219
  if working_data:
1217
1220
  return attribute_key_case_insensitive_extractor(key, None, working_data)
1218
1221
 
1219
- def last_rest_key_extractor(attr, attr_desc, data):
1222
+
1223
+ def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
1220
1224
  """Extract the attribute in "data" based on the last part of the JSON path key.
1225
+
1226
+ :param str attr: The attribute to extract
1227
+ :param dict attr_desc: The attribute description
1228
+ :param dict data: The data to extract from
1229
+ :rtype: object
1230
+ :returns: The extracted attribute
1221
1231
  """
1222
- key = attr_desc['key']
1232
+ key = attr_desc["key"]
1223
1233
  dict_keys = _FLATTEN.split(key)
1224
1234
  return attribute_key_extractor(dict_keys[-1], None, data)
1225
1235
 
1226
- def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
1236
+
1237
+ def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
1227
1238
  """Extract the attribute in "data" based on the last part of the JSON path key.
1228
1239
 
1229
1240
  This is the case insensitive version of "last_rest_key_extractor"
1241
+ :param str attr: The attribute to extract
1242
+ :param dict attr_desc: The attribute description
1243
+ :param dict data: The data to extract from
1244
+ :rtype: object
1245
+ :returns: The extracted attribute
1230
1246
  """
1231
- key = attr_desc['key']
1247
+ key = attr_desc["key"]
1232
1248
  dict_keys = _FLATTEN.split(key)
1233
1249
  return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
1234
1250
 
1251
+
1235
1252
  def attribute_key_extractor(attr, _, data):
1236
1253
  return data.get(attr)
1237
1254
 
1255
+
1238
1256
  def attribute_key_case_insensitive_extractor(attr, _, data):
1239
1257
  found_key = None
1240
1258
  lower_attr = attr.lower()
@@ -1245,6 +1263,7 @@ def attribute_key_case_insensitive_extractor(attr, _, data):
1245
1263
 
1246
1264
  return data.get(found_key)
1247
1265
 
1266
+
1248
1267
  def _extract_name_from_internal_type(internal_type):
1249
1268
  """Given an internal type XML description, extract correct XML name with namespace.
1250
1269
 
@@ -1253,14 +1272,14 @@ def _extract_name_from_internal_type(internal_type):
1253
1272
  :returns: A tuple XML name + namespace dict
1254
1273
  """
1255
1274
  internal_type_xml_map = getattr(internal_type, "_xml_map", {})
1256
- xml_name = internal_type_xml_map.get('name', internal_type.__name__)
1275
+ xml_name = internal_type_xml_map.get("name", internal_type.__name__)
1257
1276
  xml_ns = internal_type_xml_map.get("ns", None)
1258
1277
  if xml_ns:
1259
- xml_name = "{}{}".format(xml_ns, xml_name)
1278
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
1260
1279
  return xml_name
1261
1280
 
1262
1281
 
1263
- def xml_key_extractor(attr, attr_desc, data):
1282
+ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
1264
1283
  if isinstance(data, dict):
1265
1284
  return None
1266
1285
 
@@ -1268,19 +1287,19 @@ def xml_key_extractor(attr, attr_desc, data):
1268
1287
  if not isinstance(data, ET.Element):
1269
1288
  return None
1270
1289
 
1271
- xml_desc = attr_desc.get('xml', {})
1272
- xml_name = xml_desc.get('name', attr_desc['key'])
1290
+ xml_desc = attr_desc.get("xml", {})
1291
+ xml_name = xml_desc.get("name", attr_desc["key"])
1273
1292
 
1274
1293
  # Look for a children
1275
- is_iter_type = attr_desc['type'].startswith("[")
1294
+ is_iter_type = attr_desc["type"].startswith("[")
1276
1295
  is_wrapped = xml_desc.get("wrapped", False)
1277
1296
  internal_type = attr_desc.get("internalType", None)
1278
1297
  internal_type_xml_map = getattr(internal_type, "_xml_map", {})
1279
1298
 
1280
1299
  # Integrate namespace if necessary
1281
- xml_ns = xml_desc.get('ns', internal_type_xml_map.get("ns", None))
1300
+ xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
1282
1301
  if xml_ns:
1283
- xml_name = "{}{}".format(xml_ns, xml_name)
1302
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
1284
1303
 
1285
1304
  # If it's an attribute, that's simple
1286
1305
  if xml_desc.get("attr", False):
@@ -1294,15 +1313,15 @@ def xml_key_extractor(attr, attr_desc, data):
1294
1313
  # - Wrapped node
1295
1314
  # - Internal type is an enum (considered basic types)
1296
1315
  # - Internal type has no XML/Name node
1297
- if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or 'name' not in internal_type_xml_map)):
1316
+ if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
1298
1317
  children = data.findall(xml_name)
1299
1318
  # If internal type has a local name and it's not a list, I use that name
1300
- elif not is_iter_type and internal_type and 'name' in internal_type_xml_map:
1319
+ elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
1301
1320
  xml_name = _extract_name_from_internal_type(internal_type)
1302
1321
  children = data.findall(xml_name)
1303
1322
  # That's an array
1304
1323
  else:
1305
- if internal_type: # Complex type, ignore itemsName and use the complex type name
1324
+ if internal_type: # Complex type, ignore itemsName and use the complex type name
1306
1325
  items_name = _extract_name_from_internal_type(internal_type)
1307
1326
  else:
1308
1327
  items_name = xml_desc.get("itemsName", xml_name)
@@ -1311,66 +1330,62 @@ def xml_key_extractor(attr, attr_desc, data):
1311
1330
  if len(children) == 0:
1312
1331
  if is_iter_type:
1313
1332
  if is_wrapped:
1314
- return None # is_wrapped no node, we want None
1315
- else:
1316
- return [] # not wrapped, assume empty list
1333
+ return None # is_wrapped no node, we want None
1334
+ return [] # not wrapped, assume empty list
1317
1335
  return None # Assume it's not there, maybe an optional node.
1318
1336
 
1319
1337
  # If is_iter_type and not wrapped, return all found children
1320
1338
  if is_iter_type:
1321
1339
  if not is_wrapped:
1322
1340
  return children
1323
- else: # Iter and wrapped, should have found one node only (the wrap one)
1324
- if len(children) != 1:
1325
- raise DeserializationError(
1326
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
1327
- xml_name
1328
- ))
1329
- return list(children[0]) # Might be empty list and that's ok.
1341
+ # Iter and wrapped, should have found one node only (the wrap one)
1342
+ if len(children) != 1:
1343
+ raise DeserializationError(
1344
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
1345
+ xml_name
1346
+ )
1347
+ )
1348
+ return list(children[0]) # Might be empty list and that's ok.
1330
1349
 
1331
1350
  # Here it's not a itertype, we should have found one element only or empty
1332
1351
  if len(children) > 1:
1333
1352
  raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
1334
1353
  return children[0]
1335
1354
 
1336
- class Deserializer(object):
1355
+
1356
+ class Deserializer:
1337
1357
  """Response object model deserializer.
1338
1358
 
1339
1359
  :param dict classes: Class type dictionary for deserializing complex types.
1340
1360
  :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
1341
1361
  """
1342
1362
 
1343
- basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'}
1363
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
1344
1364
 
1345
- valid_date = re.compile(
1346
- r'\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}'
1347
- r'\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?')
1365
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
1348
1366
 
1349
- def __init__(self, classes=None):
1367
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
1350
1368
  self.deserialize_type = {
1351
- 'iso-8601': Deserializer.deserialize_iso,
1352
- 'rfc-1123': Deserializer.deserialize_rfc,
1353
- 'unix-time': Deserializer.deserialize_unix,
1354
- 'duration': Deserializer.deserialize_duration,
1355
- 'date': Deserializer.deserialize_date,
1356
- 'time': Deserializer.deserialize_time,
1357
- 'decimal': Deserializer.deserialize_decimal,
1358
- 'long': Deserializer.deserialize_long,
1359
- 'bytearray': Deserializer.deserialize_bytearray,
1360
- 'base64': Deserializer.deserialize_base64,
1361
- 'object': self.deserialize_object,
1362
- '[]': self.deserialize_iter,
1363
- '{}': self.deserialize_dict
1364
- }
1369
+ "iso-8601": Deserializer.deserialize_iso,
1370
+ "rfc-1123": Deserializer.deserialize_rfc,
1371
+ "unix-time": Deserializer.deserialize_unix,
1372
+ "duration": Deserializer.deserialize_duration,
1373
+ "date": Deserializer.deserialize_date,
1374
+ "time": Deserializer.deserialize_time,
1375
+ "decimal": Deserializer.deserialize_decimal,
1376
+ "long": Deserializer.deserialize_long,
1377
+ "bytearray": Deserializer.deserialize_bytearray,
1378
+ "base64": Deserializer.deserialize_base64,
1379
+ "object": self.deserialize_object,
1380
+ "[]": self.deserialize_iter,
1381
+ "{}": self.deserialize_dict,
1382
+ }
1365
1383
  self.deserialize_expected_types = {
1366
- 'duration': (isodate.Duration, datetime.timedelta),
1367
- 'iso-8601': (datetime.datetime)
1384
+ "duration": (isodate.Duration, datetime.timedelta),
1385
+ "iso-8601": (datetime.datetime),
1368
1386
  }
1369
- self.dependencies = dict(classes) if classes else {}
1370
- self.key_extractors = [
1371
- rest_key_extractor,
1372
- xml_key_extractor
1373
- ]
1387
+ self.dependencies: dict[str, type] = dict(classes) if classes else {}
1388
+ self.key_extractors = [rest_key_extractor, xml_key_extractor]
1374
1389
  # Additional properties only works if the "rest_key_extractor" is used to
1375
1390
  # extract the keys. Making it to work whatever the key extractor is too much
1376
1391
  # complicated, with no real scenario for now.
@@ -1385,66 +1400,63 @@ class Deserializer(object):
1385
1400
  :param str target_obj: Target data type to deserialize to.
1386
1401
  :param requests.Response response_data: REST response object.
1387
1402
  :param str content_type: Swagger "produces" if available.
1388
- :raises: DeserializationError if deserialization fails.
1403
+ :raises DeserializationError: if deserialization fails.
1389
1404
  :return: Deserialized object.
1405
+ :rtype: object
1390
1406
  """
1391
1407
  data = self._unpack_content(response_data, content_type)
1392
1408
  return self._deserialize(target_obj, data)
1393
1409
 
1394
- def _deserialize(self, target_obj, data):
1410
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
1395
1411
  """Call the deserializer on a model.
1396
1412
 
1397
1413
  Data needs to be already deserialized as JSON or XML ElementTree
1398
1414
 
1399
1415
  :param str target_obj: Target data type to deserialize to.
1400
1416
  :param object data: Object to deserialize.
1401
- :raises: DeserializationError if deserialization fails.
1417
+ :raises DeserializationError: if deserialization fails.
1402
1418
  :return: Deserialized object.
1419
+ :rtype: object
1403
1420
  """
1404
1421
  # This is already a model, go recursive just in case
1405
1422
  if hasattr(data, "_attribute_map"):
1406
- constants = [name for name, config in getattr(data, '_validation', {}).items()
1407
- if config.get('constant')]
1423
+ constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
1408
1424
  try:
1409
- for attr, mapconfig in data._attribute_map.items():
1425
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
1410
1426
  if attr in constants:
1411
1427
  continue
1412
1428
  value = getattr(data, attr)
1413
1429
  if value is None:
1414
1430
  continue
1415
- local_type = mapconfig['type']
1416
- internal_data_type = local_type.strip('[]{}')
1431
+ local_type = mapconfig["type"]
1432
+ internal_data_type = local_type.strip("[]{}")
1417
1433
  if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
1418
1434
  continue
1419
- setattr(
1420
- data,
1421
- attr,
1422
- self._deserialize(local_type, value)
1423
- )
1435
+ setattr(data, attr, self._deserialize(local_type, value))
1424
1436
  return data
1425
1437
  except AttributeError:
1426
1438
  return
1427
1439
 
1428
1440
  response, class_name = self._classify_target(target_obj, data)
1429
1441
 
1430
- if isinstance(response, basestring):
1442
+ if isinstance(response, str):
1431
1443
  return self.deserialize_data(data, response)
1432
- elif isinstance(response, type) and issubclass(response, Enum):
1444
+ if isinstance(response, type) and issubclass(response, Enum):
1433
1445
  return self.deserialize_enum(data, response)
1434
1446
 
1435
- if data is None:
1447
+ if data is None or data is CoreNull:
1436
1448
  return data
1437
1449
  try:
1438
- attributes = response._attribute_map
1450
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
1439
1451
  d_attrs = {}
1440
1452
  for attr, attr_desc in attributes.items():
1441
1453
  # Check empty string. If it's not empty, someone has a real "additionalProperties"...
1442
- if attr == "additional_properties" and attr_desc["key"] == '':
1454
+ if attr == "additional_properties" and attr_desc["key"] == "":
1443
1455
  continue
1444
1456
  raw_value = None
1445
1457
  # Enhance attr_desc with some dynamic data
1446
- attr_desc = attr_desc.copy() # Do a copy, do not change the real one
1447
- internal_data_type = attr_desc["type"].strip('[]{}')
1458
+ attr_desc = attr_desc.copy() # Do a copy, do not change the real one
1459
+ internal_data_type = attr_desc["type"].strip("[]{}")
1448
1460
  if internal_data_type in self.dependencies:
1449
1461
  attr_desc["internalType"] = self.dependencies[internal_data_type]
1450
1462
 
@@ -1452,37 +1464,36 @@ class Deserializer(object):
1452
1464
  found_value = key_extractor(attr, attr_desc, data)
1453
1465
  if found_value is not None:
1454
1466
  if raw_value is not None and raw_value != found_value:
1455
- msg = ("Ignoring extracted value '%s' from %s for key '%s'"
1456
- " (duplicate extraction, follow extractors order)" )
1457
- _LOGGER.warning(
1458
- msg,
1459
- found_value,
1460
- key_extractor,
1461
- attr
1467
+ msg = (
1468
+ "Ignoring extracted value '%s' from %s for key '%s'"
1469
+ " (duplicate extraction, follow extractors order)"
1462
1470
  )
1471
+ _LOGGER.warning(msg, found_value, key_extractor, attr)
1463
1472
  continue
1464
1473
  raw_value = found_value
1465
1474
 
1466
- value = self.deserialize_data(raw_value, attr_desc['type'])
1475
+ value = self.deserialize_data(raw_value, attr_desc["type"])
1467
1476
  d_attrs[attr] = value
1468
1477
  except (AttributeError, TypeError, KeyError) as err:
1469
- msg = "Unable to deserialize to object: " + class_name
1470
- raise_with_traceback(DeserializationError, msg, err)
1471
- else:
1472
- additional_properties = self._build_additional_properties(attributes, data)
1473
- return self._instantiate_model(response, d_attrs, additional_properties)
1478
+ msg = "Unable to deserialize to object: " + class_name # type: ignore
1479
+ raise DeserializationError(msg) from err
1480
+ additional_properties = self._build_additional_properties(attributes, data)
1481
+ return self._instantiate_model(response, d_attrs, additional_properties)
1474
1482
 
1475
1483
  def _build_additional_properties(self, attribute_map, data):
1476
1484
  if not self.additional_properties_detection:
1477
1485
  return None
1478
- if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != '':
1486
+ if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
1479
1487
  # Check empty string. If it's not empty, someone has a real "additionalProperties"
1480
1488
  return None
1481
1489
  if isinstance(data, ET.Element):
1482
1490
  data = {el.tag: el.text for el in data}
1483
1491
 
1484
- known_keys = {_decode_attribute_map_key(_FLATTEN.split(desc['key'])[0])
1485
- for desc in attribute_map.values() if desc['key'] != ''}
1492
+ known_keys = {
1493
+ _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
1494
+ for desc in attribute_map.values()
1495
+ if desc["key"] != ""
1496
+ }
1486
1497
  present_keys = set(data.keys())
1487
1498
  missing_keys = present_keys - known_keys
1488
1499
  return {key: data[key] for key in missing_keys}
@@ -1493,22 +1504,24 @@ class Deserializer(object):
1493
1504
  Once classification has been determined, initialize object.
1494
1505
 
1495
1506
  :param str target: The target object type to deserialize to.
1496
- :param str/dict data: The response data to deseralize.
1507
+ :param str/dict data: The response data to deserialize.
1508
+ :return: The classified target object and its class name.
1509
+ :rtype: tuple
1497
1510
  """
1498
1511
  if target is None:
1499
1512
  return None, None
1500
1513
 
1501
- if isinstance(target, basestring):
1514
+ if isinstance(target, str):
1502
1515
  try:
1503
1516
  target = self.dependencies[target]
1504
1517
  except KeyError:
1505
1518
  return target, target
1506
1519
 
1507
1520
  try:
1508
- target = target._classify(data, self.dependencies)
1521
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
1509
1522
  except AttributeError:
1510
1523
  pass # Target is not a Model, no classify
1511
- return target, target.__class__.__name__
1524
+ return target, target.__class__.__name__ # type: ignore
1512
1525
 
1513
1526
  def failsafe_deserialize(self, target_obj, data, content_type=None):
1514
1527
  """Ignores any errors encountered in deserialization,
@@ -1518,15 +1531,16 @@ class Deserializer(object):
1518
1531
  a deserialization error.
1519
1532
 
1520
1533
  :param str target_obj: The target object type to deserialize to.
1521
- :param str/dict data: The response data to deseralize.
1534
+ :param str/dict data: The response data to deserialize.
1522
1535
  :param str content_type: Swagger "produces" if available.
1536
+ :return: Deserialized object.
1537
+ :rtype: object
1523
1538
  """
1524
1539
  try:
1525
1540
  return self(target_obj, data, content_type=content_type)
1526
- except:
1527
- _LOGGER.warning(
1528
- "Ran into a deserialization error. Ignoring since this is failsafe deserialization",
1529
- exc_info=True
1541
+ except: # pylint: disable=bare-except
1542
+ _LOGGER.debug(
1543
+ "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
1530
1544
  )
1531
1545
  return None
1532
1546
 
@@ -1542,10 +1556,12 @@ class Deserializer(object):
1542
1556
 
1543
1557
  If raw_data is something else, bypass all logic and return it directly.
1544
1558
 
1545
- :param raw_data: Data to be processed.
1546
- :param content_type: How to parse if raw_data is a string/bytes.
1559
+ :param obj raw_data: Data to be processed.
1560
+ :param str content_type: How to parse if raw_data is a string/bytes.
1547
1561
  :raises JSONDecodeError: If JSON is requested and parsing is impossible.
1548
1562
  :raises UnicodeDecodeError: If bytes is not UTF8
1563
+ :rtype: object
1564
+ :return: Unpacked content.
1549
1565
  """
1550
1566
  # Assume this is enough to detect a Pipeline Response without importing it
1551
1567
  context = getattr(raw_data, "context", {})
@@ -1554,49 +1570,50 @@ class Deserializer(object):
1554
1570
  return context[RawDeserializer.CONTEXT_NAME]
1555
1571
  raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
1556
1572
 
1557
- #Assume this is enough to recognize universal_http.ClientResponse without importing it
1573
+ # Assume this is enough to recognize universal_http.ClientResponse without importing it
1558
1574
  if hasattr(raw_data, "body"):
1559
- return RawDeserializer.deserialize_from_http_generics(
1560
- raw_data.text(),
1561
- raw_data.headers
1562
- )
1575
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
1563
1576
 
1564
1577
  # Assume this enough to recognize requests.Response without importing it.
1565
- if hasattr(raw_data, '_content_consumed'):
1566
- return RawDeserializer.deserialize_from_http_generics(
1567
- raw_data.text,
1568
- raw_data.headers
1569
- )
1578
+ if hasattr(raw_data, "_content_consumed"):
1579
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
1570
1580
 
1571
- if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'):
1572
- return RawDeserializer.deserialize_from_text(raw_data, content_type)
1581
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
1582
+ return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
1573
1583
  return raw_data
1574
1584
 
1575
1585
  def _instantiate_model(self, response, attrs, additional_properties=None):
1576
1586
  """Instantiate a response model passing in deserialized args.
1577
1587
 
1578
- :param response: The response model class.
1579
- :param d_attrs: The deserialized response attributes.
1588
+ :param Response response: The response model class.
1589
+ :param dict attrs: The deserialized response attributes.
1590
+ :param dict additional_properties: Additional properties to be set.
1591
+ :rtype: Response
1592
+ :return: The instantiated response model.
1580
1593
  """
1581
1594
  if callable(response):
1582
- subtype = getattr(response, '_subtype_map', {})
1595
+ subtype = getattr(response, "_subtype_map", {})
1583
1596
  try:
1584
- readonly = [k for k, v in response._validation.items()
1585
- if v.get('readonly')]
1586
- const = [k for k, v in response._validation.items()
1587
- if v.get('constant')]
1588
- kwargs = {k: v for k, v in attrs.items()
1589
- if k not in subtype and k not in readonly + const}
1597
+ readonly = [
1598
+ k
1599
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
1600
+ if v.get("readonly")
1601
+ ]
1602
+ const = [
1603
+ k
1604
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
1605
+ if v.get("constant")
1606
+ ]
1607
+ kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
1590
1608
  response_obj = response(**kwargs)
1591
1609
  for attr in readonly:
1592
1610
  setattr(response_obj, attr, attrs.get(attr))
1593
1611
  if additional_properties:
1594
- response_obj.additional_properties = additional_properties
1612
+ response_obj.additional_properties = additional_properties # type: ignore
1595
1613
  return response_obj
1596
1614
  except TypeError as err:
1597
- msg = "Unable to deserialize {} into model {}. ".format(
1598
- kwargs, response)
1599
- raise DeserializationError(msg + str(err))
1615
+ msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
1616
+ raise DeserializationError(msg + str(err)) from err
1600
1617
  else:
1601
1618
  try:
1602
1619
  for attr, value in attrs.items():
@@ -1605,15 +1622,16 @@ class Deserializer(object):
1605
1622
  except Exception as exp:
1606
1623
  msg = "Unable to populate response model. "
1607
1624
  msg += "Type: {}, Error: {}".format(type(response), exp)
1608
- raise DeserializationError(msg)
1625
+ raise DeserializationError(msg) from exp
1609
1626
 
1610
- def deserialize_data(self, data, data_type):
1627
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
1611
1628
  """Process data for deserialization according to data type.
1612
1629
 
1613
1630
  :param str data: The response string to be deserialized.
1614
1631
  :param str data_type: The type to deserialize to.
1615
- :raises: DeserializationError if deserialization fails.
1632
+ :raises DeserializationError: if deserialization fails.
1616
1633
  :return: Deserialized object.
1634
+ :rtype: object
1617
1635
  """
1618
1636
  if data is None:
1619
1637
  return data
@@ -1627,7 +1645,11 @@ class Deserializer(object):
1627
1645
  if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
1628
1646
  return data
1629
1647
 
1630
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
1648
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
1649
+ "object",
1650
+ "[]",
1651
+ r"{}",
1652
+ ]
1631
1653
  if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
1632
1654
  return None
1633
1655
  data_val = self.deserialize_type[data_type](data)
@@ -1646,26 +1668,23 @@ class Deserializer(object):
1646
1668
  except (ValueError, TypeError, AttributeError) as err:
1647
1669
  msg = "Unable to deserialize response data."
1648
1670
  msg += " Data: {}, {}".format(data, data_type)
1649
- raise_with_traceback(DeserializationError, msg, err)
1650
- else:
1651
- return self._deserialize(obj_type, data)
1671
+ raise DeserializationError(msg) from err
1672
+ return self._deserialize(obj_type, data)
1652
1673
 
1653
1674
  def deserialize_iter(self, attr, iter_type):
1654
1675
  """Deserialize an iterable.
1655
1676
 
1656
1677
  :param list attr: Iterable to be deserialized.
1657
1678
  :param str iter_type: The type of object in the iterable.
1679
+ :return: Deserialized iterable.
1658
1680
  :rtype: list
1659
1681
  """
1660
1682
  if attr is None:
1661
1683
  return None
1662
- if isinstance(attr, ET.Element): # If I receive an element here, get the children
1684
+ if isinstance(attr, ET.Element): # If I receive an element here, get the children
1663
1685
  attr = list(attr)
1664
1686
  if not isinstance(attr, (list, set)):
1665
- raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(
1666
- iter_type,
1667
- type(attr)
1668
- ))
1687
+ raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
1669
1688
  return [self.deserialize_data(a, iter_type) for a in attr]
1670
1689
 
1671
1690
  def deserialize_dict(self, attr, dict_type):
@@ -1674,31 +1693,33 @@ class Deserializer(object):
1674
1693
  :param dict/list attr: Dictionary to be deserialized. Also accepts
1675
1694
  a list of key, value pairs.
1676
1695
  :param str dict_type: The object type of the items in the dictionary.
1696
+ :return: Deserialized dictionary.
1677
1697
  :rtype: dict
1678
1698
  """
1679
1699
  if isinstance(attr, list):
1680
- return {x['key']: self.deserialize_data(x['value'], dict_type) for x in attr}
1700
+ return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
1681
1701
 
1682
1702
  if isinstance(attr, ET.Element):
1683
1703
  # Transform <Key>value</Key> into {"Key": "value"}
1684
1704
  attr = {el.tag: el.text for el in attr}
1685
1705
  return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
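
`deserialize_dict` accepts either a mapping or a list of key/value pairs, as the first branch above shows; a minimal sketch:

```python
# Sketch only: list-of-pairs input is folded back into a dict, with each value
# deserialized to the requested type.
from azure.ai.textanalytics._utils.serialization import Deserializer  # path assumed

d = Deserializer()
pairs = [{"key": "limit", "value": "10"}, {"key": "offset", "value": "0"}]
print(d.deserialize_dict(pairs, "int"))  # -> {'limit': 10, 'offset': 0}
```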
1686
1706
 
1687
- def deserialize_object(self, attr, **kwargs):
1707
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
1688
1708
  """Deserialize a generic object.
1689
1709
  This will be handled as a dictionary.
1690
1710
 
1691
1711
  :param dict attr: Dictionary to be deserialized.
1712
+ :return: Deserialized object.
1692
1713
  :rtype: dict
1693
- :raises: TypeError if non-builtin datatype encountered.
1714
+ :raises TypeError: if non-builtin datatype encountered.
1694
1715
  """
1695
1716
  if attr is None:
1696
1717
  return None
1697
1718
  if isinstance(attr, ET.Element):
1698
1719
  # Do no recurse on XML, just return the tree as-is
1699
1720
  return attr
1700
- if isinstance(attr, basestring):
1701
- return self.deserialize_basic(attr, 'str')
1721
+ if isinstance(attr, str):
1722
+ return self.deserialize_basic(attr, "str")
1702
1723
  obj_type = type(attr)
1703
1724
  if obj_type in self.basic_types:
1704
1725
  return self.deserialize_basic(attr, self.basic_types[obj_type])
@@ -1709,8 +1730,7 @@ class Deserializer(object):
1709
1730
  deserialized = {}
1710
1731
  for key, value in attr.items():
1711
1732
  try:
1712
- deserialized[key] = self.deserialize_object(
1713
- value, **kwargs)
1733
+ deserialized[key] = self.deserialize_object(value, **kwargs)
1714
1734
  except ValueError:
1715
1735
  deserialized[key] = None
1716
1736
  return deserialized
@@ -1719,17 +1739,15 @@ class Deserializer(object):
1719
1739
  deserialized = []
1720
1740
  for obj in attr:
1721
1741
  try:
1722
- deserialized.append(self.deserialize_object(
1723
- obj, **kwargs))
1742
+ deserialized.append(self.deserialize_object(obj, **kwargs))
1724
1743
  except ValueError:
1725
1744
  pass
1726
1745
  return deserialized
1727
1746
 
1728
- else:
1729
- error = "Cannot deserialize generic object with type: "
1730
- raise TypeError(error + str(obj_type))
1747
+ error = "Cannot deserialize generic object with type: "
1748
+ raise TypeError(error + str(obj_type))
1731
1749
 
1732
- def deserialize_basic(self, attr, data_type):
1750
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
1733
1751
  """Deserialize basic builtin data type from string.
1734
1752
  Will attempt to convert to str, int, float and bool.
1735
1753
  This function will also accept '1', '0', 'true' and 'false' as
@@ -1737,8 +1755,9 @@ class Deserializer(object):
1737
1755
 
1738
1756
  :param str attr: response string to be deserialized.
1739
1757
  :param str data_type: deserialization data type.
1758
+ :return: Deserialized basic type.
1740
1759
  :rtype: str, int, float or bool
1741
- :raises: TypeError if string format is not valid.
1760
+ :raises TypeError: if string format is not valid.
1742
1761
  """
1743
1762
  # If we're here, data is supposed to be a basic type.
1744
1763
  # If it's still an XML node, take the text
@@ -1747,25 +1766,24 @@ class Deserializer(object):
1747
1766
  if not attr:
1748
1767
  if data_type == "str":
1749
1768
  # None or '', node <a/> is empty string.
1750
- return ''
1751
- else:
1752
- # None or '', node <a/> with a strong type is None.
1753
- # Don't try to model "empty bool" or "empty int"
1754
- return None
1769
+ return ""
1770
+ # None or '', node <a/> with a strong type is None.
1771
+ # Don't try to model "empty bool" or "empty int"
1772
+ return None
1755
1773
 
1756
- if data_type == 'bool':
1774
+ if data_type == "bool":
1757
1775
  if attr in [True, False, 1, 0]:
1758
1776
  return bool(attr)
1759
- elif isinstance(attr, basestring):
1760
- if attr.lower() in ['true', '1']:
1777
+ if isinstance(attr, str):
1778
+ if attr.lower() in ["true", "1"]:
1761
1779
  return True
1762
- elif attr.lower() in ['false', '0']:
1780
+ if attr.lower() in ["false", "0"]:
1763
1781
  return False
1764
1782
  raise TypeError("Invalid boolean value: {}".format(attr))
1765
1783
 
1766
- if data_type == 'str':
1784
+ if data_type == "str":
1767
1785
  return self.deserialize_unicode(attr)
1768
- return eval(data_type)(attr) # nosec
1786
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
1769
1787
 
1770
1788
  @staticmethod
1771
1789
  def deserialize_unicode(data):
@@ -1773,6 +1791,7 @@ class Deserializer(object):
1773
1791
  as a string.
1774
1792
 
1775
1793
  :param str data: response string to be deserialized.
1794
+ :return: Deserialized string.
1776
1795
  :rtype: str or unicode
1777
1796
  """
1778
1797
  # We might be here because we have an enum modeled as string,
@@ -1782,12 +1801,11 @@ class Deserializer(object):
1782
1801
 
1783
1802
  # Consider this is real string
1784
1803
  try:
1785
- if isinstance(data, unicode):
1804
+ if isinstance(data, unicode): # type: ignore
1786
1805
  return data
1787
1806
  except NameError:
1788
1807
  return str(data)
1789
- else:
1790
- return str(data)
1808
+ return str(data)
1791
1809
 
1792
1810
  @staticmethod
1793
1811
  def deserialize_enum(data, enum_obj):
@@ -1799,6 +1817,7 @@ class Deserializer(object):
1799
1817
  :param str data: Response string to be deserialized. If this value is
1800
1818
  None or invalid it will be returned as-is.
1801
1819
  :param Enum enum_obj: Enum object to deserialize to.
1820
+ :return: Deserialized enum object.
1802
1821
  :rtype: Enum
1803
1822
  """
1804
1823
  if isinstance(data, enum_obj) or data is None:
@@ -1807,12 +1826,11 @@ class Deserializer(object):
1807
1826
  data = data.value
1808
1827
  if isinstance(data, int):
1809
1828
  # Workaround. We might consider remove it in the future.
1810
- # https://github.com/Azure/azure-rest-api-specs/issues/141
1811
1829
  try:
1812
1830
  return list(enum_obj.__members__.values())[data]
1813
- except IndexError:
1831
+ except IndexError as exc:
1814
1832
  error = "{!r} is not a valid index for enum {!r}"
1815
- raise DeserializationError(error.format(data, enum_obj))
1833
+ raise DeserializationError(error.format(data, enum_obj)) from exc
1816
1834
  try:
1817
1835
  return enum_obj(str(data))
1818
1836
  except ValueError:
@@ -1828,26 +1846,28 @@ class Deserializer(object):
1828
1846
  """Deserialize string into bytearray.
1829
1847
 
1830
1848
  :param str attr: response string to be deserialized.
1849
+ :return: Deserialized bytearray
1831
1850
  :rtype: bytearray
1832
- :raises: TypeError if string format invalid.
1851
+ :raises TypeError: if string format invalid.
1833
1852
  """
1834
1853
  if isinstance(attr, ET.Element):
1835
1854
  attr = attr.text
1836
- return bytearray(b64decode(attr))
1855
+ return bytearray(b64decode(attr)) # type: ignore
1837
1856
 
1838
1857
  @staticmethod
1839
1858
  def deserialize_base64(attr):
1840
1859
  """Deserialize base64 encoded string into string.
1841
1860
 
1842
1861
  :param str attr: response string to be deserialized.
1862
+ :return: Deserialized base64 string
1843
1863
  :rtype: bytearray
1844
- :raises: TypeError if string format invalid.
1864
+ :raises TypeError: if string format invalid.
1845
1865
  """
1846
1866
  if isinstance(attr, ET.Element):
1847
1867
  attr = attr.text
1848
- padding = '=' * (3 - (len(attr) + 3) % 4)
1849
- attr = attr + padding
1850
- encoded = attr.replace('-', '+').replace('_', '/')
1868
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
1869
+ attr = attr + padding # type: ignore
1870
+ encoded = attr.replace("-", "+").replace("_", "/")
1851
1871
  return b64decode(encoded)
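
The matching decoder, `deserialize_base64`, restores the stripped padding before decoding; a hedged round-trip with the encoder shown earlier:

```python
# Sketch only: '=' padding is recomputed from the length and the URL-safe alphabet
# is mapped back before b64decode.
from azure.ai.textanalytics._utils.serialization import Deserializer  # path assumed

print(Deserializer.deserialize_base64("dGV4dD9hbmFseXRpY3M"))  # -> b'text?analytics'
```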
1852
1872
 
1853
1873
  @staticmethod
@@ -1855,73 +1875,77 @@ class Deserializer(object):
1855
1875
  """Deserialize string into Decimal object.
1856
1876
 
1857
1877
  :param str attr: response string to be deserialized.
1858
- :rtype: Decimal
1859
- :raises: DeserializationError if string format invalid.
1878
+ :return: Deserialized decimal
1879
+ :raises DeserializationError: if string format invalid.
1880
+ :rtype: decimal
1860
1881
  """
1861
1882
  if isinstance(attr, ET.Element):
1862
1883
  attr = attr.text
1863
1884
  try:
1864
- return decimal.Decimal(attr)
1885
+ return decimal.Decimal(str(attr)) # type: ignore
1865
1886
  except decimal.DecimalException as err:
1866
1887
  msg = "Invalid decimal {}".format(attr)
1867
- raise_with_traceback(DeserializationError, msg, err)
1888
+ raise DeserializationError(msg) from err
1868
1889
 
1869
1890
  @staticmethod
1870
1891
  def deserialize_long(attr):
1871
1892
  """Deserialize string into long (Py2) or int (Py3).
1872
1893
 
1873
1894
  :param str attr: response string to be deserialized.
1895
+ :return: Deserialized int
1874
1896
  :rtype: long or int
1875
- :raises: ValueError if string format invalid.
1897
+ :raises ValueError: if string format invalid.
1876
1898
  """
1877
1899
  if isinstance(attr, ET.Element):
1878
1900
  attr = attr.text
1879
- return _long_type(attr)
1901
+ return _long_type(attr) # type: ignore
1880
1902
 
1881
1903
  @staticmethod
1882
1904
  def deserialize_duration(attr):
1883
1905
  """Deserialize ISO-8601 formatted string into TimeDelta object.
1884
1906
 
1885
1907
  :param str attr: response string to be deserialized.
1908
+ :return: Deserialized duration
1886
1909
  :rtype: TimeDelta
1887
- :raises: DeserializationError if string format invalid.
1910
+ :raises DeserializationError: if string format invalid.
1888
1911
  """
1889
1912
  if isinstance(attr, ET.Element):
1890
1913
  attr = attr.text
1891
1914
  try:
1892
1915
  duration = isodate.parse_duration(attr)
1893
- except(ValueError, OverflowError, AttributeError) as err:
1916
+ except (ValueError, OverflowError, AttributeError) as err:
1894
1917
  msg = "Cannot deserialize duration object."
1895
- raise_with_traceback(DeserializationError, msg, err)
1896
- else:
1897
- return duration
1918
+ raise DeserializationError(msg) from err
1919
+ return duration
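
`deserialize_duration` simply defers to isodate; a hedged example:

```python
# Sketch only: ISO-8601 durations come back as timedelta (or isodate.Duration when
# years/months are involved).
import datetime
from azure.ai.textanalytics._utils.serialization import Deserializer  # path assumed

assert Deserializer.deserialize_duration("P1DT2H") == datetime.timedelta(days=1, hours=2)
print(Deserializer.deserialize_duration("PT1H30M"))  # -> 1:30:00
```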
1898
1920
 
1899
1921
  @staticmethod
1900
1922
  def deserialize_date(attr):
1901
1923
  """Deserialize ISO-8601 formatted string into Date object.
1902
1924
 
1903
1925
  :param str attr: response string to be deserialized.
1926
+ :return: Deserialized date
1904
1927
  :rtype: Date
1905
- :raises: DeserializationError if string format invalid.
1928
+ :raises DeserializationError: if string format invalid.
1906
1929
  """
1907
1930
  if isinstance(attr, ET.Element):
1908
1931
  attr = attr.text
1909
- if re.search(r"[^\W\d_]", attr, re.I + re.U):
1932
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
1910
1933
  raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
1911
1934
  # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
1912
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
1935
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
1913
1936
 
1914
1937
  @staticmethod
1915
1938
  def deserialize_time(attr):
1916
1939
  """Deserialize ISO-8601 formatted string into time object.
1917
1940
 
1918
1941
  :param str attr: response string to be deserialized.
1942
+ :return: Deserialized time
1919
1943
  :rtype: datetime.time
1920
- :raises: DeserializationError if string format invalid.
1944
+ :raises DeserializationError: if string format invalid.
1921
1945
  """
1922
1946
  if isinstance(attr, ET.Element):
1923
1947
  attr = attr.text
1924
- if re.search(r"[^\W\d_]", attr, re.I + re.U):
1948
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
1925
1949
  raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
1926
1950
  return isodate.parse_time(attr)
1927
1951
 
@@ -1930,42 +1954,42 @@ class Deserializer(object):
1930
1954
  """Deserialize RFC-1123 formatted string into Datetime object.
1931
1955
 
1932
1956
  :param str attr: response string to be deserialized.
1957
+ :return: Deserialized RFC datetime
1933
1958
  :rtype: Datetime
1934
- :raises: DeserializationError if string format invalid.
1959
+ :raises DeserializationError: if string format invalid.
1935
1960
  """
1936
1961
  if isinstance(attr, ET.Element):
1937
1962
  attr = attr.text
1938
1963
  try:
1939
- parsed_date = email.utils.parsedate_tz(attr)
1964
+ parsed_date = email.utils.parsedate_tz(attr) # type: ignore
1940
1965
  date_obj = datetime.datetime(
1941
- *parsed_date[:6],
1942
- tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0)/60))
1966
+ *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
1943
1967
  )
1944
1968
  if not date_obj.tzinfo:
1945
1969
  date_obj = date_obj.astimezone(tz=TZ_UTC)
1946
1970
  except ValueError as err:
1947
1971
  msg = "Cannot deserialize to rfc datetime object."
1948
- raise_with_traceback(DeserializationError, msg, err)
1949
- else:
1950
- return date_obj
1972
+ raise DeserializationError(msg) from err
1973
+ return date_obj
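
A hedged sketch of the RFC-1123 parser above, which now builds the offset with `datetime.timezone` instead of the removed `_FixedOffset` helper:

```python
# Sketch only: parsedate_tz yields the offset in seconds; the deserializer converts
# it into a timezone-aware datetime.
from azure.ai.textanalytics._utils.serialization import Deserializer  # path assumed

dt = Deserializer.deserialize_rfc("Mon, 25 Dec 2023 14:30:00 GMT")
print(dt)  # -> 2023-12-25 14:30:00+00:00
```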
1951
1974
 
1952
1975
  @staticmethod
1953
1976
  def deserialize_iso(attr):
1954
1977
  """Deserialize ISO-8601 formatted string into Datetime object.
1955
1978
 
1956
1979
  :param str attr: response string to be deserialized.
1980
+ :return: Deserialized ISO datetime
1957
1981
  :rtype: Datetime
1958
- :raises: DeserializationError if string format invalid.
1982
+ :raises DeserializationError: if string format invalid.
1959
1983
  """
1960
1984
  if isinstance(attr, ET.Element):
1961
1985
  attr = attr.text
1962
1986
  try:
1963
- attr = attr.upper()
1987
+ attr = attr.upper() # type: ignore
1964
1988
  match = Deserializer.valid_date.match(attr)
1965
1989
  if not match:
1966
1990
  raise ValueError("Invalid datetime string: " + attr)
1967
1991
 
1968
- check_decimal = attr.split('.')
1992
+ check_decimal = attr.split(".")
1969
1993
  if len(check_decimal) > 1:
1970
1994
  decimal_str = ""
1971
1995
  for digit in check_decimal[1]:
@@ -1980,11 +2004,10 @@ class Deserializer(object):
1980
2004
  test_utc = date_obj.utctimetuple()
1981
2005
  if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
1982
2006
  raise OverflowError("Hit max or min date")
1983
- except(ValueError, OverflowError, AttributeError) as err:
2007
+ except (ValueError, OverflowError, AttributeError) as err:
1984
2008
  msg = "Cannot deserialize datetime object."
1985
- raise_with_traceback(DeserializationError, msg, err)
1986
- else:
1987
- return date_obj
2009
+ raise DeserializationError(msg) from err
2010
+ return date_obj
1988
2011
 
1989
2012
  @staticmethod
1990
2013
  def deserialize_unix(attr):
@@ -1992,15 +2015,16 @@ class Deserializer(object):
  This is represented as seconds.

  :param int attr: Object to be serialized.
+ :return: Deserialized datetime
  :rtype: Datetime
- :raises: DeserializationError if format invalid
+ :raises DeserializationError: if format invalid
  """
  if isinstance(attr, ET.Element):
- attr = int(attr.text)
+ attr = int(attr.text) # type: ignore
  try:
+ attr = int(attr)
  date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
  except ValueError as err:
  msg = "Cannot deserialize to unix datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
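
Finally, a hedged sketch of `deserialize_unix` with the `int(attr)` cast added in this version:

```python
# Sketch only: the cast lets numeric strings through as well; the result is always
# a timezone-aware UTC datetime.
from azure.ai.textanalytics._utils.serialization import Deserializer  # path assumed

print(Deserializer.deserialize_unix(0))             # -> 1970-01-01 00:00:00+00:00
print(Deserializer.deserialize_unix("1700000000"))  # -> 2023-11-14 22:13:20+00:00
```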