@autorest/python 6.12.4 → 6.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/autorest/codegen/models/__init__.py +2 -2
- package/autorest/codegen/models/base_builder.py +19 -6
- package/autorest/codegen/models/code_model.py +1 -21
- package/autorest/codegen/models/combined_type.py +17 -2
- package/autorest/codegen/models/model_type.py +22 -0
- package/autorest/codegen/models/operation.py +3 -2
- package/autorest/codegen/models/property.py +17 -7
- package/autorest/codegen/models/request_builder.py +1 -1
- package/autorest/codegen/serializers/model_serializer.py +41 -4
- package/autorest/codegen/templates/model_base.py.jinja2 +42 -20
- package/autorest/codegen/templates/model_dpg.py.jinja2 +27 -4
- package/autorest/codegen/templates/model_msrest.py.jinja2 +3 -1
- package/autorest/codegen/templates/packaging_templates/setup.py.jinja2 +3 -0
- package/autorest/codegen/templates/serialization.py.jinja2 +12 -19
- package/autorest/m4reformatter/__init__.py +4 -0
- package/package.json +1 -1

package/autorest/codegen/models/__init__.py
@@ -133,9 +133,9 @@ TYPE_TO_OBJECT = {
     "binary": BinaryType,
     "any": AnyType,
     "datetime": DatetimeType,
-    "time": TimeType,
+    "plainTime": TimeType,
     "duration": DurationType,
-    "date": DateType,
+    "plainDate": DateType,
     "bytes": ByteArraySchema,
     "boolean": BooleanType,
     "combined": CombinedType,

package/autorest/codegen/models/base_builder.py
@@ -4,7 +4,17 @@
 # license information.
 # --------------------------------------------------------------------------
 import logging
-from typing import
+from typing import (
+    List,
+    Dict,
+    Any,
+    Generic,
+    TypeVar,
+    Optional,
+    Union,
+    TYPE_CHECKING,
+    cast,
+)
 from abc import abstractmethod

 from .base import BaseModel
@@ -28,11 +38,16 @@ if TYPE_CHECKING:
     from .operation import Operation
     from .request_builder import RequestBuilder

+
+OverloadListType = TypeVar(
+    "OverloadListType", bound=Union[List["Operation"], List["RequestBuilder"]]
+)
+
 _LOGGER = logging.getLogger(__name__)


 class BaseBuilder(
-    Generic[ParameterListType], BaseModel
+    Generic[ParameterListType, OverloadListType], BaseModel
 ):  # pylint: disable=too-many-instance-attributes
     """Base class for Operations and Request Builders"""

@@ -44,16 +59,14 @@ class BaseBuilder(
         name: str,
         parameters: ParameterListType,
         *,
-        overloads=None,
+        overloads: Optional[OverloadListType] = None,
     ) -> None:
         super().__init__(yaml_data=yaml_data, code_model=code_model)
         self.client = client
         self.name = name
         self._description: str = yaml_data.get("description", "")
         self.parameters = parameters
-        self.overloads
-            overloads or []
-        )
+        self.overloads = overloads or cast(OverloadListType, [])
         self._summary: str = yaml_data.get("summary", "")
         self.want_tracing: bool = yaml_data.get("wantTracing", True)
         self.group_name: str = yaml_data[

package/autorest/codegen/models/code_model.py
@@ -11,7 +11,6 @@ from .model_type import ModelType
 from .combined_type import CombinedType
 from .client import Client
 from .request_builder import RequestBuilder, OverloadedRequestBuilder
-from .constant_type import ConstantType


 def _is_legacy(options) -> bool:
@@ -279,25 +278,6 @@ class CodeModel:  # pylint: disable=too-many-public-methods, disable=too-many-in

     @property
     def need_typing_extensions(self) -> bool:
-        if self.options["models_mode"]
-            isinstance(p.type, ConstantType)
-            and (p.optional or self.options["models_mode"] == "dpg")
-            for model in self.model_types
-            for p in model.properties
-        ):
-            return True
-        if any(
-            isinstance(parameter.type, ConstantType)
-            for client in self.clients
-            for og in client.operation_groups
-            for op in og.operations
-            for parameter in op.parameters.method
-        ):
-            return True
-        if any(
-            isinstance(parameter.type, ConstantType)
-            for client in self.clients
-            for parameter in client.config.parameters.kwargs_to_pop
-        ):
+        if self.options["models_mode"] == "dpg":
             return True
         return False

package/autorest/codegen/models/combined_type.py
@@ -29,6 +29,7 @@ class CombinedType(BaseType):
         super().__init__(yaml_data, code_model)
         self.types = types  # the types that this type is combining
         self.name = yaml_data.get("name")
+        self._is_union_of_literals = all(i.type == "constant" for i in self.types)

     @property
     def serialization_type(self) -> str:
@@ -73,7 +74,20 @@ class CombinedType(BaseType):

         Special case for enum, for instance: Union[str, "EnumName"]
         """
-
+        # remove duplicates
+        inside_types = list(
+            dict.fromkeys([type.type_annotation(**kwargs) for type in self.types])
+        )
+        if len(inside_types) == 1:
+            return inside_types[0]
+        if self._is_union_of_literals:
+            parsed_values = []
+            for entry in inside_types:
+                match = re.search(r"Literal\[(.*)\]", entry)
+                if match is not None:
+                    parsed_values.append(match.group(1))
+            join_string = ", ".join(parsed_values)
+            return f"Literal[{join_string}]"

         # If the inside types has been a Union, peel first and then re-union
         pattern = re.compile(r"Union\[.*\]")
@@ -116,7 +130,8 @@ class CombinedType(BaseType):
             return file_import
         for type in self.types:
             file_import.merge(type.imports(**kwargs))
-        file_import.add_submodule_import("typing", "Union", ImportType.STDLIB)
+        if not self._is_union_of_literals:
+            file_import.add_submodule_import("typing", "Union", ImportType.STDLIB)
         return file_import

     @classmethod

package/autorest/codegen/models/model_type.py
@@ -76,6 +76,22 @@ class ModelType(  # pylint: disable=abstract-method
         self.snake_case_name: str = self.yaml_data["snakeCaseName"]
         self.page_result_model: bool = self.yaml_data.get("pageResultModel", False)

+    @property
+    def flattened_property(self) -> Optional[Property]:
+        try:
+            return next(p for p in self.properties if p.flatten)
+        except StopIteration:
+            return None
+
+    @property
+    def flattened_items(self) -> List[str]:
+        return [
+            item.client_name
+            for prop in self.properties
+            if isinstance(prop.type, ModelType) and prop.flatten
+            for item in prop.type.properties
+        ]
+
     @property
     def is_form_data(self) -> bool:
         return any(p.is_multipart_file_input for p in self.properties)
@@ -360,3 +376,9 @@ class DPGModelType(GeneratedModelType):
     @property
     def instance_check_template(self) -> str:
         return "isinstance({}, _model_base.Model)"
+
+    def imports(self, **kwargs: Any) -> FileImport:
+        file_import = super().imports(**kwargs)
+        if self.flattened_property:
+            file_import.add_submodule_import("typing", "Any", ImportType.STDLIB)
+        return file_import

package/autorest/codegen/models/operation.py
@@ -14,6 +14,7 @@ from typing import (
     Generic,
     TypeVar,
     cast,
+    Sequence,
 )

 from .request_builder_parameter import RequestBuilderParameter
@@ -54,7 +55,7 @@ def is_internal(target: Optional[BaseType]) -> bool:


 class OperationBase(  # pylint: disable=too-many-public-methods
-    Generic[ResponseType], BaseBuilder[ParameterList]
+    Generic[ResponseType], BaseBuilder[ParameterList, List["Operation"]]
 ):
     def __init__(
         self,
@@ -490,7 +491,7 @@ class OperationBase(  # pylint: disable=too-many-public-methods
         ) from exc

     @property
-    def success_status_codes(self) ->
+    def success_status_codes(self) -> Sequence[Union[str, int]]:
         """The list of all successfull status code."""
         return sorted(
             [code for response in self.responses for code in response.status_codes]

package/autorest/codegen/models/property.py
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-from typing import Any, Dict, Optional, TYPE_CHECKING, List
+from typing import Any, Dict, Optional, TYPE_CHECKING, List, cast

 from .base import BaseModel
 from .constant_type import ConstantType
@@ -40,6 +40,9 @@ class Property(BaseModel):  # pylint: disable=too-many-instance-attributes
         self.is_multipart_file_input: bool = yaml_data.get(
             "isMultipartFileInput", False
         )
+        self.flatten = self.yaml_data.get("flatten", False) and not getattr(
+            self.type, "flattened_property", False
+        )

     @property
     def pylint_disable(self) -> str:
@@ -96,6 +99,17 @@ class Property(BaseModel):  # pylint: disable=too-many-instance-attributes
     def is_enum_discriminator(self) -> bool:
         return self.is_discriminator and self.type.type == "enum"

+    @property
+    def is_base_discriminator(self) -> bool:
+        """If this discriminator is on the base model for polymorphic inheritance"""
+        if self.is_enum_discriminator:
+            return self.is_polymorphic and self.client_default_value is None
+        return (
+            self.is_discriminator
+            and self.is_polymorphic
+            and cast(ConstantType, self.type).value is None
+        )
+
     def type_annotation(self, *, is_operation_file: bool = False) -> str:
         types_type_annotation = self.type.type_annotation(
             is_operation_file=is_operation_file
@@ -103,17 +117,13 @@ class Property(BaseModel):  # pylint: disable=too-many-instance-attributes
         if self.is_multipart_file_input:
             # we only support FileType or list of FileType
             types_type_annotation = types_type_annotation.replace("bytes", "FileType")
-        if self.
-
-            return "Literal[None]"
+        if self.is_base_discriminator:
+            return "str"
         if self.optional and self.client_default_value is None:
             return f"Optional[{types_type_annotation}]"
         return types_type_annotation

     def get_declaration(self, value: Any = None) -> Any:
-        if self.is_enum_discriminator:
-            # here we are the enum discriminator property on the base model
-            return None
         return self.type.get_declaration(value)

     def get_json_template_representation(

package/autorest/codegen/serializers/model_serializer.py
@@ -60,8 +60,8 @@ class _ModelSerializer(BaseSerializer, ABC):
     def variable_documentation_string(prop: Property) -> List[str]:
         return _documentation_string(prop, "ivar", "vartype")

-    def super_call(self, model: ModelType):
-        return f"super().__init__({self.properties_to_pass_to_super(model)})"
+    def super_call(self, model: ModelType) -> List[str]:
+        return [f"super().__init__({self.properties_to_pass_to_super(model)})"]

     @staticmethod
     def initialize_discriminator_property(model: ModelType, prop: Property) -> str:
@@ -197,6 +197,17 @@ class MsrestModelSerializer(_ModelSerializer):


 class DpgModelSerializer(_ModelSerializer):
+    def super_call(self, model: ModelType) -> List[str]:
+        super_call = f"super().__init__({self.properties_to_pass_to_super(model)})"
+        if model.flattened_property:
+            return [
+                "_flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}",
+                super_call,
+                "for k, v in _flattened_input.items():",
+                "    setattr(self, k, v)",
+            ]
+        return [super_call]
+
     def imports(self) -> FileImport:
         file_import = FileImport(self.code_model)
         file_import.add_submodule_import(
@@ -238,6 +249,7 @@ class DpgModelSerializer(_ModelSerializer):
             if not any(
                 p.client_name == pp.client_name
                 and p.type_annotation() == pp.type_annotation()
+                and not p.is_base_discriminator
                 for pp in parent_properties
             )
         ]
@@ -277,7 +289,7 @@ class DpgModelSerializer(_ModelSerializer):
     def initialize_properties(self, model: ModelType) -> List[str]:
         init_args = []
         for prop in self.get_properties_to_declare(model):
-            if prop.constant:
+            if prop.constant and not prop.is_base_discriminator:
                 init_args.append(
                     f"self.{prop.client_name}: {prop.type_annotation()} = "
                     f"{prop.get_declaration()}"
@@ -289,5 +301,30 @@ class DpgModelSerializer(_ModelSerializer):
         return [
             p
             for p in model.properties
-            if
+            if p.is_base_discriminator
+            or not p.is_discriminator
+            and not p.constant
+            and p.visibility != ["read"]
         ]
+
+    @staticmethod
+    def properties_to_pass_to_super(model: ModelType) -> str:
+        properties_to_pass_to_super = ["*args"]
+        for parent in model.parents:
+            for prop in model.properties:
+                if (
+                    prop.client_name
+                    in [
+                        prop.client_name
+                        for prop in parent.properties
+                        if prop.is_base_discriminator
+                    ]
+                    and prop.is_discriminator
+                    and not prop.constant
+                    and not prop.readonly
+                ):
+                    properties_to_pass_to_super.append(
+                        f"{prop.client_name}={prop.get_declaration()}"
+                    )
+        properties_to_pass_to_super.append("**kwargs")
+        return ", ".join(properties_to_pass_to_super)

package/autorest/codegen/templates/model_base.py.jinja2
@@ -5,7 +5,6 @@
 # license information.
 # --------------------------------------------------------------------------
 # pylint: disable=protected-access, arguments-differ, signature-differs, broad-except
-# pyright: reportGeneralTypeIssues=false

 import calendar
 import decimal
@@ -16,9 +15,11 @@ import base64
 import re
 import copy
 import typing
+import enum
 import email.utils
 from datetime import datetime, date, time, timedelta, timezone
 from json import JSONEncoder
+from typing_extensions import Self
 import isodate
 from {{ code_model.core_library }}.exceptions import DeserializationError
 from {{ code_model.core_library }}{{ ".utils" if code_model.options["unbranded"] else "" }} import CaseInsensitiveEnumMeta
@@ -35,6 +36,7 @@ _LOGGER = logging.getLogger(__name__)
 __all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]

 TZ_UTC = timezone.utc
+_T = typing.TypeVar("_T")


 def _timedelta_as_isostr(td: timedelta) -> str:
@@ -242,7 +244,7 @@ def _deserialize_date(attr: typing.Union[str, date]) -> date:
     # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
     if isinstance(attr, date):
         return attr
-    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)  # type: ignore


 def _deserialize_time(attr: typing.Union[str, time]) -> time:
@@ -383,8 +385,12 @@ class _MyMutableMapping(MutableMapping[str, typing.Any]):  # pylint: disable=uns
         except KeyError:
             return default

-    @typing.overload
-    def pop(self, key: str) -> typing.Any:
+    @typing.overload
+    def pop(self, key: str) -> typing.Any:
+        ...
+
+    @typing.overload
+    def pop(self, key: str, default: _T) -> _T:
         ...

     @typing.overload
@@ -405,8 +411,8 @@ class _MyMutableMapping(MutableMapping[str, typing.Any]):  # pylint: disable=uns
     def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:
         self._data.update(*args, **kwargs)

-    @typing.overload
-    def setdefault(self, key: str) ->
+    @typing.overload
+    def setdefault(self, key: str, default: None = None) -> None:
         ...

     @typing.overload
@@ -446,6 +452,8 @@ def _serialize(o, format: typing.Optional[str] = None):  # pylint: disable=too-m
         return _serialize_bytes(o, format)
     if isinstance(o, decimal.Decimal):
         return float(o)
+    if isinstance(o, enum.Enum):
+        return o.value
     try:
         # First try datetime.datetime
         return _serialize_datetime(o, format)
@@ -512,7 +520,7 @@ class Model(_MyMutableMapping):
     def copy(self) -> "Model":
         return Model(self.__dict__)

-    def __new__(cls, *args: typing.Any, **kwargs: typing.Any) ->
+    def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:  # pylint: disable=unused-argument
         # we know the last three classes in mro are going to be 'Model', 'dict', and 'object'
         mros = cls.__mro__[:-3][::-1]  # ignore model, dict, and object parents, and reverse the mro order
         attr_to_rest_field: typing.Dict[str, _RestField] = {  # map attribute name to rest_field property
@@ -554,7 +562,7 @@ class Model(_MyMutableMapping):
         exist_discriminators.append(discriminator)
         mapped_cls = cls.__mapping__.get(
             data.get(discriminator), cls
-        )  # pylint: disable=no-member
+        )  # pyright: ignore # pylint: disable=no-member
         if mapped_cls == cls:
             return cls(data)
         return mapped_cls._deserialize(data, exist_discriminators)  # pylint: disable=protected-access
@@ -571,7 +579,7 @@ class Model(_MyMutableMapping):
         if exclude_readonly:
             readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
         for k, v in self.items():
-            if exclude_readonly and k in readonly_props:  # pyright: ignore
+            if exclude_readonly and k in readonly_props:  # pyright: ignore
                 continue
             is_multipart_file_input = False
             try:
@@ -630,22 +638,22 @@ def _get_deserialize_callable_from_annotation(  # pylint: disable=R0911, R0915,
                 return obj
             return _deserialize(model_deserializer, obj)

-        return functools.partial(_deserialize_model, annotation)
+        return functools.partial(_deserialize_model, annotation)  # pyright: ignore
     except Exception:
         pass

     # is it a literal?
     try:
-        if annotation.__origin__ is typing.Literal:
+        if annotation.__origin__ is typing.Literal:  # pyright: ignore
             return None
     except AttributeError:
         pass

     # is it optional?
     try:
-        if any(a for a in annotation.__args__ if a == type(None)):
+        if any(a for a in annotation.__args__ if a == type(None)):  # pyright: ignore
             if_obj_deserializer = _get_deserialize_callable_from_annotation(
-                next(a for a in annotation.__args__ if a != type(None)), module, rf
+                next(a for a in annotation.__args__ if a != type(None)), module, rf  # pyright: ignore
             )

             def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
@@ -658,7 +666,13 @@ def _get_deserialize_callable_from_annotation(  # pylint: disable=R0911, R0915,
         pass

     if getattr(annotation, "__origin__", None) is typing.Union:
-
+        # initial ordering is we make `string` the last deserialization option, because it is often them most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in sorted(
+                annotation.__args__, key=lambda x: hasattr(x, "__name__") and x.__name__ == "str"  # pyright: ignore
+            )
+        ]

         def _deserialize_with_union(deserializers, obj):
             for deserializer in deserializers:
@@ -671,8 +685,10 @@ def _get_deserialize_callable_from_annotation(  # pylint: disable=R0911, R0915,
         return functools.partial(_deserialize_with_union, deserializers)

     try:
-        if annotation._name == "Dict":
-            value_deserializer = _get_deserialize_callable_from_annotation(
+        if annotation._name == "Dict":  # pyright: ignore
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )

             def _deserialize_dict(
                 value_deserializer: typing.Optional[typing.Callable],
@@ -692,8 +708,8 @@ def _get_deserialize_callable_from_annotation(  # pylint: disable=R0911, R0915,
     except (AttributeError, IndexError):
         pass
     try:
-        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:
-            if len(annotation.__args__) > 1:
+        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:  # pyright: ignore
+            if len(annotation.__args__) > 1:  # pyright: ignore

                 def _deserialize_multiple_sequence(
                     entry_deserializers: typing.List[typing.Optional[typing.Callable]],
@@ -707,10 +723,12 @@ def _get_deserialize_callable_from_annotation(  # pylint: disable=R0911, R0915,
                 )

             entry_deserializers = [
-                _get_deserialize_callable_from_annotation(dt, module, rf) for dt in annotation.__args__
+                _get_deserialize_callable_from_annotation(dt, module, rf) for dt in annotation.__args__  # pyright: ignore
             ]
             return functools.partial(_deserialize_multiple_sequence, entry_deserializers)
-        deserializer = _get_deserialize_callable_from_annotation(
+        deserializer = _get_deserialize_callable_from_annotation(
+            annotation.__args__[0], module, rf  # pyright: ignore
+        )

         def _deserialize_sequence(
             deserializer: typing.Optional[typing.Callable],
@@ -802,6 +820,10 @@ class _RestField:
         self._format = format
         self._is_multipart_file_input = is_multipart_file_input

+    @property
+    def _class_type(self) -> typing.Any:
+        return getattr(self._type, "args", [None])[0]
+
     @property
     def _rest_name(self) -> str:
         if self._rest_name_input is None:

package/autorest/codegen/templates/model_dpg.py.jinja2
@@ -39,6 +39,10 @@
     {% endif %}
     {% endfor %}

+    {% if code_model.options["models_mode"] == "dpg" and model.flattened_property %}
+    __flattened_items = ["{{ model.flattened_items|join('\", \"') }}"]
+    {% endif %}
+
     {% if not model.internal and serializer.init_line(model) %}
     @overload
     def __init__(
@@ -60,8 +64,27 @@
     {% set initialize_properties = serializer.initialize_properties(model) %}
     {% if not model.internal and serializer.init_line(model) or initialize_properties %}
     def __init__(self, *args: Any, **kwargs: Any) -> None:{{ '# pylint: disable=useless-super-delegation' if not initialize_properties else '' }}
-
-
+        {% for line in serializer.super_call(model) %}
+        {{ line }}
+        {% endfor %}
+        {% for initialize_property in initialize_properties %}
         {{ initialize_property }}
-
-
+        {% endfor %}
+    {% if code_model.options["models_mode"] == "dpg" and model.flattened_property %}
+    {% set flattened_property_attr = model.flattened_property.client_name %}
+
+    def __getattr__(self, name: str) -> Any:
+        if name in self.__flattened_items:
+            if self.{{ flattened_property_attr }} is None: return None
+            return getattr(self.{{ flattened_property_attr }}, name)
+        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
+
+    def __setattr__(self, key: str, value: Any) -> None:
+        if key in self.__flattened_items:
+            if self.{{ flattened_property_attr }} is None:
+                self.{{ flattened_property_attr }} = self._attr_to_rest_field["{{ flattened_property_attr }}"]._class_type()
+            setattr(self.properties, key, value)
+        else:
+            super().__setattr__(key, value)
+    {% endif %}
+    {% endif %}

package/autorest/codegen/templates/model_msrest.py.jinja2
@@ -83,7 +83,9 @@
         {% endfor %}
         {% endif %}
         """
-
+        {% for line in serializer.super_call(model) %}
+        {{ line }}
+        {% endfor %}
         {% for initialize_property in initialize_properties %}
         {{ initialize_property }}
         {% endfor %}

package/autorest/codegen/templates/serialization.py.jinja2
@@ -170,13 +170,6 @@ class RawDeserializer:
             return None


-try:
-    basestring  # type: ignore
-    unicode_str = unicode  # type: ignore
-except NameError:
-    basestring = str
-    unicode_str = str
-
 _LOGGER = logging.getLogger(__name__)

 try:
@@ -547,7 +540,7 @@ class Serializer(object):
         "multiple": lambda x, y: x % y != 0,
     }

-    def __init__(self, classes: Optional[Mapping[str,
+    def __init__(self, classes: Optional[Mapping[str, type]]=None):
         self.serialize_type = {
             "iso-8601": Serializer.serialize_iso,
             "rfc-1123": Serializer.serialize_rfc,
@@ -563,7 +556,7 @@ class Serializer(object):
             "[]": self.serialize_iter,
             "{}": self.serialize_dict,
         }
-        self.dependencies: Dict[str,
+        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
         self.key_transformer = full_restapi_key_transformer
         self.client_side_validation = True

@@ -651,7 +644,7 @@ class Serializer(object):
             else:  # That's a basic type
                 # Integrate namespace if necessary
                 local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
-                local_node.text = unicode_str(new_attr)
+                local_node.text = str(new_attr)
                 serialized.append(local_node)  # type: ignore
         else:  # JSON
             for k in reversed(keys):  # type: ignore
@@ -1000,7 +993,7 @@ class Serializer(object):
             return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
         if obj_type is _long_type:
             return self.serialize_long(attr)
-        if obj_type is unicode_str:
+        if obj_type is str:
             return self.serialize_unicode(attr)
         if obj_type is datetime.datetime:
             return self.serialize_iso(attr)
@@ -1376,7 +1369,7 @@ class Deserializer(object):

     valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")

-    def __init__(self, classes: Optional[Mapping[str,
+    def __init__(self, classes: Optional[Mapping[str, type]]=None):
         self.deserialize_type = {
             "iso-8601": Deserializer.deserialize_iso,
             "rfc-1123": Deserializer.deserialize_rfc,
@@ -1396,7 +1389,7 @@ class Deserializer(object):
             "duration": (isodate.Duration, datetime.timedelta),
             "iso-8601": (datetime.datetime),
         }
-        self.dependencies: Dict[str,
+        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
         self.key_extractors = [rest_key_extractor, xml_key_extractor]
         # Additional properties only works if the "rest_key_extractor" is used to
         # extract the keys. Making it to work whatever the key extractor is too much
@@ -1449,7 +1442,7 @@ class Deserializer(object):

         response, class_name = self._classify_target(target_obj, data)

-        if isinstance(response, basestring):
+        if isinstance(response, str):
             return self.deserialize_data(data, response)
         elif isinstance(response, type) and issubclass(response, Enum):
             return self.deserialize_enum(data, response)
@@ -1520,14 +1513,14 @@ class Deserializer(object):
         if target is None:
             return None, None

-        if isinstance(target, basestring):
+        if isinstance(target, str):
             try:
                 target = self.dependencies[target]
             except KeyError:
                 return target, target

         try:
-            target = target._classify(data, self.dependencies)
+            target = target._classify(data, self.dependencies)  # type: ignore
         except AttributeError:
             pass  # Target is not a Model, no classify
         return target, target.__class__.__name__  # type: ignore
@@ -1583,7 +1576,7 @@ class Deserializer(object):
         if hasattr(raw_data, "_content_consumed"):
             return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)

-        if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"):
+        if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
             return RawDeserializer.deserialize_from_text(raw_data, content_type)  # type: ignore
         return raw_data

@@ -1705,7 +1698,7 @@ class Deserializer(object):
         if isinstance(attr, ET.Element):
             # Do no recurse on XML, just return the tree as-is
             return attr
-        if isinstance(attr, basestring):
+        if isinstance(attr, str):
             return self.deserialize_basic(attr, "str")
         obj_type = type(attr)
         if obj_type in self.basic_types:
@@ -1762,7 +1755,7 @@ class Deserializer(object):
         if data_type == "bool":
             if attr in [True, False, 1, 0]:
                 return bool(attr)
-            elif isinstance(attr, basestring):
+            elif isinstance(attr, str):
                 if attr.lower() in ["true", "1"]:
                     return True
                 elif attr.lower() in ["false", "0"]:

package/autorest/m4reformatter/__init__.py
@@ -240,6 +240,10 @@ def update_primitive(  # pylint: disable=too-many-return-statements
         base = _update_type_base("bytes", yaml_data)
         base["encode"] = yaml_data["format"]
         return base
+    if type_group == "date":
+        return _update_type_base("plainDate", yaml_data)
+    if type_group == "time":
+        return _update_type_base("plainTime", yaml_data)
     return _update_type_base(type_group, yaml_data)
