@autorest/python 6.13.16 → 6.13.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/autorest/__init__.py +4 -12
- package/autorest/_utils.py +5 -20
- package/autorest/black/__init__.py +1 -3
- package/autorest/codegen/__init__.py +29 -96
- package/autorest/codegen/models/__init__.py +1 -3
- package/autorest/codegen/models/base.py +2 -6
- package/autorest/codegen/models/base_builder.py +2 -6
- package/autorest/codegen/models/client.py +24 -86
- package/autorest/codegen/models/code_model.py +13 -53
- package/autorest/codegen/models/combined_type.py +3 -9
- package/autorest/codegen/models/constant_type.py +4 -14
- package/autorest/codegen/models/credential_types.py +11 -35
- package/autorest/codegen/models/dictionary_type.py +4 -12
- package/autorest/codegen/models/enum_type.py +10 -31
- package/autorest/codegen/models/imports.py +15 -41
- package/autorest/codegen/models/list_type.py +7 -23
- package/autorest/codegen/models/lro_operation.py +6 -18
- package/autorest/codegen/models/lro_paging_operation.py +1 -3
- package/autorest/codegen/models/model_type.py +21 -73
- package/autorest/codegen/models/operation.py +41 -139
- package/autorest/codegen/models/operation_group.py +21 -60
- package/autorest/codegen/models/paging_operation.py +13 -43
- package/autorest/codegen/models/parameter.py +16 -54
- package/autorest/codegen/models/parameter_list.py +27 -103
- package/autorest/codegen/models/primitive_types.py +15 -49
- package/autorest/codegen/models/property.py +8 -28
- package/autorest/codegen/models/request_builder.py +11 -39
- package/autorest/codegen/models/request_builder_parameter.py +4 -13
- package/autorest/codegen/models/response.py +16 -57
- package/autorest/codegen/serializers/__init__.py +46 -150
- package/autorest/codegen/serializers/builder_serializer.py +113 -402
- package/autorest/codegen/serializers/client_serializer.py +25 -78
- package/autorest/codegen/serializers/enum_serializer.py +1 -3
- package/autorest/codegen/serializers/general_serializer.py +6 -22
- package/autorest/codegen/serializers/import_serializer.py +13 -40
- package/autorest/codegen/serializers/metadata_serializer.py +7 -21
- package/autorest/codegen/serializers/model_init_serializer.py +1 -5
- package/autorest/codegen/serializers/model_serializer.py +13 -51
- package/autorest/codegen/serializers/operation_groups_serializer.py +1 -3
- package/autorest/codegen/serializers/operations_init_serializer.py +2 -8
- package/autorest/codegen/serializers/parameter_serializer.py +8 -26
- package/autorest/codegen/serializers/request_builders_serializer.py +1 -3
- package/autorest/codegen/serializers/sample_serializer.py +13 -39
- package/autorest/codegen/serializers/test_serializer.py +10 -30
- package/autorest/codegen/serializers/types_serializer.py +1 -3
- package/autorest/jsonrpc/__init__.py +1 -3
- package/autorest/jsonrpc/server.py +3 -9
- package/autorest/jsonrpc/stdstream.py +4 -12
- package/autorest/m2r/__init__.py +2 -6
- package/autorest/m4reformatter/__init__.py +72 -237
- package/autorest/multiapi/__init__.py +4 -11
- package/autorest/multiapi/models/client.py +7 -21
- package/autorest/multiapi/models/code_model.py +9 -28
- package/autorest/multiapi/models/config.py +1 -3
- package/autorest/multiapi/models/global_parameters.py +6 -16
- package/autorest/multiapi/models/imports.py +5 -9
- package/autorest/multiapi/models/operation_mixin_group.py +5 -17
- package/autorest/multiapi/serializers/__init__.py +10 -31
- package/autorest/multiapi/serializers/import_serializer.py +8 -24
- package/autorest/multiapi/utils.py +2 -6
- package/autorest/multiclient/__init__.py +1 -3
- package/autorest/postprocess/__init__.py +9 -29
- package/autorest/postprocess/get_all.py +1 -3
- package/autorest/postprocess/venvtools.py +1 -3
- package/autorest/preprocess/__init__.py +40 -126
- package/install.py +2 -0
- package/package.json +1 -1
- package/prepare.py +3 -1
- package/requirements.txt +1 -1
- package/run_cadl.py +1 -3
- package/setup.py +25 -25
- package/start.py +2 -0
- package/venvtools.py +15 -10
package/autorest/codegen/models/code_model.py

@@ -57,17 +57,11 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
         for type_yaml in yaml_data.get("types", []):
             build_type(yaml_data=type_yaml, code_model=self)
         self.clients: List[Client] = [
-            Client.from_yaml(client_yaml_data, self)
-            for client_yaml_data in yaml_data["clients"]
+            Client.from_yaml(client_yaml_data, self) for client_yaml_data in yaml_data["clients"]
         ]
         self.subnamespace_to_clients: Dict[str, List[Client]] = {
-            subnamespace: [
-                Client.from_yaml(client_yaml, self, is_subclient=True)
-                for client_yaml in client_yamls
-            ]
-            for subnamespace, client_yamls in yaml_data.get(
-                "subnamespaceToClients", {}
-            ).items()
+            subnamespace: [Client.from_yaml(client_yaml, self, is_subclient=True) for client_yaml in client_yamls]
+            for subnamespace, client_yamls in yaml_data.get("subnamespaceToClients", {}).items()
         }
         if self.options["models_mode"] and self.model_types:
             self.sort_model_types()
@@ -80,11 +74,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
 
     @property
     def has_form_data(self) -> bool:
-        return any(
-            og.has_form_data_body
-            for client in self.clients
-            for og in client.operation_groups
-        )
+        return any(og.has_form_data_body for client in self.clients for og in client.operation_groups)
 
     @property
     def has_etag(self) -> bool:
@@ -94,25 +84,15 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
     def has_operations(self) -> bool:
         if any(c for c in self.clients if c.has_operations):
             return True
-        return any(
-            c
-            for clients in self.subnamespace_to_clients.values()
-            for c in clients
-            if c.has_operations
-        )
+        return any(c for clients in self.subnamespace_to_clients.values() for c in clients if c.has_operations)
 
     @property
     def has_non_abstract_operations(self) -> bool:
         return any(c for c in self.clients if c.has_non_abstract_operations) or any(
-            c
-            for cs in self.subnamespace_to_clients.values()
-            for c in cs
-            if c.has_non_abstract_operations
+            c for cs in self.subnamespace_to_clients.values() for c in cs if c.has_non_abstract_operations
         )
 
-    def lookup_request_builder(
-        self, request_builder_id: int
-    ) -> Union[RequestBuilder, OverloadedRequestBuilder]:
+    def lookup_request_builder(self, request_builder_id: int) -> Union[RequestBuilder, OverloadedRequestBuilder]:
         """Find the request builder based off of id"""
         for client in self.clients:
             try:
@@ -140,12 +120,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
             return True
         if async_mode:
            return self.need_mixin_abc
-        return (
-            self.need_request_converter
-            or self.need_mixin_abc
-            or self.has_etag
-            or self.has_form_data
-        )
+        return self.need_request_converter or self.need_mixin_abc or self.has_etag or self.has_form_data
 
     @property
     def need_request_converter(self) -> bool:
@@ -164,10 +139,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
         """Get the name of the operations folder that holds operations."""
         name = "operations"
         if self.options["version_tolerant"] and not any(
-            og
-            for client in self.clients
-            for og in client.operation_groups
-            if not og.is_mixin
+            og for client in self.clients for og in client.operation_groups if not og.is_mixin
         ):
             name = f"_{name}"
         return name
@@ -196,8 +168,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
             self._model_types = [
                 t
                 for t in self.types_map.values()
-                if isinstance(t, ModelType)
-                and not (self.options["models_mode"] == "dpg" and t.page_result_model)
+                if isinstance(t, ModelType) and not (self.options["models_mode"] == "dpg" and t.page_result_model)
             ]
         return self._model_types
 
@@ -227,9 +198,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
         if current.id in seen_schema_yaml_ids:
             return []
         if current.name in seen_schema_names:
-            raise ValueError(
-                f"We have already generated a schema with name {current.name}"
-            )
+            raise ValueError(f"We have already generated a schema with name {current.name}")
         ancestors = [current]
         if current.parents:
             for parent in current.parents:
@@ -237,12 +206,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
                     continue
                 seen_schema_names.add(current.name)
                 seen_schema_yaml_ids.add(current.id)
-                ancestors = (
-                    self._sort_model_types_helper(
-                        parent, seen_schema_names, seen_schema_yaml_ids
-                    )
-                    + ancestors
-                )
+                ancestors = self._sort_model_types_helper(parent, seen_schema_names, seen_schema_yaml_ids) + ancestors
         seen_schema_names.add(current.name)
         seen_schema_yaml_ids.add(current.id)
         return ancestors
@@ -257,11 +221,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
         seen_schema_yaml_ids: Set[int] = set()
         sorted_object_schemas: List[ModelType] = []
         for schema in sorted(self.model_types, key=lambda x: x.name.lower()):
-            sorted_object_schemas.extend(
-                self._sort_model_types_helper(
-                    schema, seen_schema_names, seen_schema_yaml_ids
-                )
-            )
+            sorted_object_schemas.extend(self._sort_model_types_helper(schema, seen_schema_names, seen_schema_yaml_ids))
         self.model_types = sorted_object_schemas
 
     @property

package/autorest/codegen/models/combined_type.py

@@ -51,9 +51,7 @@ class CombinedType(BaseType):
     def client_default_value(self) -> Any:
         return self.yaml_data.get("clientDefaultValue")
 
-    def description(
-        self, *, is_operation_file: bool  # pylint: disable=unused-argument
-    ) -> str:
+    def description(self, *, is_operation_file: bool) -> str:  # pylint: disable=unused-argument
         if len(self.types) == 2:
             return f"Is either a {self.types[0].type_description} type or a {self.types[1].type_description} type."
         return f"Is one of the following types: {', '.join([t.type_description for t in self.types])}"
@@ -75,9 +73,7 @@ class CombinedType(BaseType):
         Special case for enum, for instance: Union[str, "EnumName"]
         """
         # remove duplicates
-        inside_types = list(
-            dict.fromkeys([type.type_annotation(**kwargs) for type in self.types])
-        )
+        inside_types = list(dict.fromkeys([type.type_annotation(**kwargs) for type in self.types]))
         if len(inside_types) == 1:
             return inside_types[0]
         if self._is_union_of_literals:
@@ -135,9 +131,7 @@ class CombinedType(BaseType):
         return file_import
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "BaseType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "BaseType":
         from . import build_type
 
         return cls(

package/autorest/codegen/models/constant_type.py

@@ -76,22 +76,14 @@ class ConstantType(BaseType):
         return self.value_type.docstring_type(**kwargs)
 
     def type_annotation(self, **kwargs: Any) -> str:
-        return (
-            f"Literal[{self.get_declaration()}]"
-            if self._is_literal
-            else self.value_type.type_annotation(**kwargs)
-        )
+        return f"Literal[{self.get_declaration()}]" if self._is_literal else self.value_type.type_annotation(**kwargs)
 
     @property
     def _is_literal(self) -> bool:
-        return isinstance(
-            self.value_type, (IntegerType, BinaryType, StringType, BooleanType)
-        )
+        return isinstance(self.value_type, (IntegerType, BinaryType, StringType, BooleanType))
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "ConstantType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "ConstantType":
         """Constructs a ConstantType from yaml data.
 
         :param yaml_data: the yaml data from which we will construct this schema
@@ -134,9 +126,7 @@ class ConstantType(BaseType):
     def imports(self, **kwargs: Any) -> FileImport:
         file_import = self._imports_shared(**kwargs)
         if self._is_literal:
-            file_import.add_submodule_import(
-                "typing", "Literal", ImportType.STDLIB, TypingSection.REGULAR
-            )
+            file_import.add_submodule_import("typing", "Literal", ImportType.STDLIB, TypingSection.REGULAR)
         return file_import
 
     @property

package/autorest/codegen/models/credential_types.py

@@ -57,9 +57,7 @@ class BearerTokenCredentialPolicyType(_CredentialPolicyBaseType):
         return f"policies.{policy_name}(self.credential, *self.credential_scopes, **kwargs)"
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "BearerTokenCredentialPolicyType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "BearerTokenCredentialPolicyType":
         return cls(yaml_data, code_model, yaml_data["credentialScopes"])
 
 
@@ -85,27 +83,17 @@ class KeyCredentialPolicyType(_CredentialPolicyBaseType):
 
     @property
     def credential_name(self) -> str:
-        return (
-            "AzureKeyCredential"
-            if self.code_model.is_azure_flavor
-            else "ServiceKeyCredential"
-        )
+        return "AzureKeyCredential" if self.code_model.is_azure_flavor else "ServiceKeyCredential"
 
     def call(self, async_mode: bool) -> str:
         params = f'"{self.key}", '
         if self.scheme:
             params += f'prefix="{self.scheme}", '
-        return (
-            f"policies.{self.credential_name}Policy(self.credential, {params}**kwargs)"
-        )
+        return f"policies.{self.credential_name}Policy(self.credential, {params}**kwargs)"
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "KeyCredentialPolicyType":
-        return cls(
-            yaml_data, code_model, yaml_data["key"], yaml_data.get("scheme", None)
-        )
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "KeyCredentialPolicyType":
+        return cls(yaml_data, code_model, yaml_data["key"], yaml_data.get("scheme", None))
 
 
 CredentialPolicyType = TypeVar(
@@ -118,9 +106,7 @@ CredentialPolicyType = TypeVar(
 )
 
 
-class CredentialType(
-    Generic[CredentialPolicyType], BaseType
-):  # pylint:disable=abstract-method
+class CredentialType(Generic[CredentialPolicyType], BaseType):  # pylint:disable=abstract-method
     """Store info about the type of the credential. Can be either an KeyCredential or a TokenCredential"""
 
     def __init__(
@@ -132,9 +118,7 @@ class CredentialType(
         super().__init__(yaml_data, code_model)
         self.policy = policy
 
-    def description(
-        self, *, is_operation_file: bool  # pylint: disable=unused-argument
-    ) -> str:
+    def description(self, *, is_operation_file: bool) -> str:  # pylint: disable=unused-argument
         return ""
 
     def get_json_template_representation(
@@ -144,9 +128,7 @@ class CredentialType(
         client_default_value_declaration: Optional[str] = None,
         description: Optional[str] = None,
     ) -> Any:
-        raise TypeError(
-            "You should not try to get a JSON template representation of a CredentialSchema"
-        )
+        raise TypeError("You should not try to get a JSON template representation of a CredentialSchema")
 
     def docstring_text(self, **kwargs: Any) -> str:
         return "credential"
@@ -156,17 +138,13 @@ class CredentialType(
         return self.docstring_type()
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "CredentialType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "CredentialType":
         from . import build_type
 
         return cls(
             yaml_data,
             code_model,
-            policy=cast(
-                CredentialPolicyType, build_type(yaml_data["policy"], code_model)
-            ),
+            policy=cast(CredentialPolicyType, build_type(yaml_data["policy"], code_model)),
         )
 
 
@@ -225,9 +203,7 @@ class KeyCredentialType(
     """Type for an KeyCredential"""
 
     def docstring_type(self, **kwargs: Any) -> str:  # pylint: disable=unused-argument
-        return (
-            f"~{self.code_model.core_library}.credentials.{self.policy.credential_name}"
-        )
+        return f"~{self.code_model.core_library}.credentials.{self.policy.credential_name}"
 
     def type_annotation(self, **kwargs: Any) -> str:  # pylint: disable=unused-argument
         return self.policy.credential_name

package/autorest/codegen/models/dictionary_type.py

@@ -89,19 +89,13 @@ class DictionaryType(BaseType):
 
         if isinstance(self.element_type, ModelType):
             is_polymorphic_subtype = (
-                self.element_type.discriminator_value
-                and not self.element_type.discriminated_subtypes
+                self.element_type.discriminator_value and not self.element_type.discriminated_subtypes
             )
-            if (
-                self.element_type.name not in (m.name for m in polymorphic_subtypes)
-                and is_polymorphic_subtype
-            ):
+            if self.element_type.name not in (m.name for m in polymorphic_subtypes) and is_polymorphic_subtype:
                 polymorphic_subtypes.append(self.element_type)
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "DictionaryType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "DictionaryType":
         """Constructs a DictionaryType from yaml data.
 
         :param yaml_data: the yaml data from which we will construct this schema
@@ -124,9 +118,7 @@ class DictionaryType(BaseType):
 
     def imports(self, **kwargs: Any) -> FileImport:
         file_import = FileImport(self.code_model)
-        file_import.add_submodule_import(
-            "typing", "Dict", ImportType.STDLIB, TypingSection.CONDITIONAL
-        )
+        file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB, TypingSection.CONDITIONAL)
         file_import.merge(self.element_type.imports(**kwargs))
         return file_import
 

package/autorest/codegen/models/enum_type.py

@@ -78,19 +78,13 @@ class EnumValue(BaseType):
     def imports(self, **kwargs: Any) -> FileImport:
         file_import = FileImport(self.code_model)
         file_import.merge(self.value_type.imports(**kwargs))
-        file_import.add_submodule_import(
-            "typing", "Literal", ImportType.STDLIB, TypingSection.REGULAR
-        )
-        file_import.add_submodule_import(
-            "._enums", self.enum_type.name, ImportType.LOCAL, TypingSection.REGULAR
-        )
+        file_import.add_submodule_import("typing", "Literal", ImportType.STDLIB, TypingSection.REGULAR)
+        file_import.add_submodule_import("._enums", self.enum_type.name, ImportType.LOCAL, TypingSection.REGULAR)
 
         return file_import
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "EnumValue":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "EnumValue":
         """Constructs an EnumValue from yaml data.
 
         :param yaml_data: the yaml data from which we will construct this object
@@ -133,9 +127,7 @@ class EnumType(BaseType):
         self.values = values
         self.value_type = value_type
         self.internal: bool = self.yaml_data.get("internal", False)
-        self.cross_language_definition_id: Optional[str] = self.yaml_data.get(
-            "crossLanguageDefinitionId"
-        )
+        self.cross_language_definition_id: Optional[str] = self.yaml_data.get("crossLanguageDefinitionId")
 
     def __lt__(self, other):
         return self.name.lower() < other.name.lower()
@@ -149,9 +141,7 @@ class EnumType(BaseType):
         """
         return self.value_type.serialization_type
 
-    def description(
-        self, *, is_operation_file: bool  # pylint: disable=unused-argument
-    ) -> str:
+    def description(self, *, is_operation_file: bool) -> str:  # pylint: disable=unused-argument
         possible_values = [self.get_declaration(v.value) for v in self.values]
         if not possible_values:
             return ""
@@ -161,8 +151,7 @@ class EnumType(BaseType):
             possible_values_str = " and ".join(possible_values)
         else:
             possible_values_str = (
-                ", ".join(possible_values[: len(possible_values) - 1])
-                + f", and {possible_values[-1]}"
+                ", ".join(possible_values[: len(possible_values) - 1]) + f", and {possible_values[-1]}"
             )
 
         enum_description = f"Known values are: {possible_values_str}."
@@ -219,16 +208,12 @@ class EnumType(BaseType):
     def instance_check_template(self) -> str:
         return self.value_type.instance_check_template
 
-    def fill_instance_from_yaml(
-        self, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> None:
+    def fill_instance_from_yaml(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None:
         for value in yaml_data["values"]:
             self.values.append(EnumValue.from_yaml(value, code_model))
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "EnumType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "EnumType":
         raise ValueError(
             "You shouldn't call from_yaml for EnumType to avoid recursion. "
             "Please initial a blank EnumType, then call .fill_instance_from_yaml on the created type."
@@ -238,9 +223,7 @@ class EnumType(BaseType):
         operation = kwargs.pop("operation", False)
         file_import = FileImport(self.code_model)
         if self.code_model.options["models_mode"]:
-            file_import.add_submodule_import(
-                "typing", "Union", ImportType.STDLIB, TypingSection.CONDITIONAL
-            )
+            file_import.add_submodule_import("typing", "Union", ImportType.STDLIB, TypingSection.CONDITIONAL)
             if not operation:
                 file_import.add_submodule_import(
                     "..",
@@ -258,10 +241,6 @@ class EnumType(BaseType):
                     "models",
                     ImportType.LOCAL,
                     alias="_models",
-                    typing_section=(
-                        TypingSection.TYPING
-                        if kwargs.get("model_typing")
-                        else TypingSection.REGULAR
-                    ),
+                    typing_section=(TypingSection.TYPING if kwargs.get("model_typing") else TypingSection.REGULAR),
                 )
         return file_import

package/autorest/codegen/models/imports.py

@@ -30,12 +30,8 @@ class TypingSection(str, Enum):
 
 class MsrestImportType(Enum):
     Module = auto()  # import _serialization.py or msrest.serialization as Module
-    Serializer = (
-        auto()
-    )  # from _serialization.py or msrest.serialization import Serializer
-    SerializerDeserializer = (
-        auto()
-    )  # from _serialization.py or msrest.serialization import Serializer and Deserializer
+    Serializer = auto()  # from _serialization.py or msrest.serialization import Serializer
+    SerializerDeserializer = auto()  # from _serialization.py or msrest.serialization import Serializer and Deserializer
 
 
 class ImportModel:
@@ -47,9 +43,7 @@ class ImportModel:
         *,
         submodule_name: Optional[str] = None,
         alias: Optional[str] = None,
-        version_modules: Optional[
-            Tuple[Tuple[Tuple[int, int], str, Optional[str]]]
-        ] = None,
+        version_modules: Optional[Tuple[Tuple[Tuple[int, int], str, Optional[str]]]] = None,
     ):
         self.typing_section = typing_section
         self.import_type = import_type
@@ -109,23 +103,15 @@ class FileImport:
             ]
             if all(l not in mod_name for l in core_libraries):
                 # this is to make sure we don't tack on core libraries when we don't need to
-                import_model.module_name = (
-                    f"{self.code_model.core_library}{'.' if mod_name else ''}{mod_name}"
-                )
+                import_model.module_name = f"{self.code_model.core_library}{'.' if mod_name else ''}{mod_name}"
         if not any(
             i
             for i in self.imports
-            if all(
-                getattr(i, attr) == getattr(import_model, attr)
-                for attr in dir(i)
-                if attr[0] != "_"
-            )
+            if all(getattr(i, attr) == getattr(import_model, attr) for attr in dir(i) if attr[0] != "_")
         ):
             self.imports.append(import_model)
 
-    def get_imports_from_section(
-        self, typing_section: TypingSection
-    ) -> List[ImportModel]:
+    def get_imports_from_section(self, typing_section: TypingSection) -> List[ImportModel]:
         return [i for i in self.imports if i.typing_section == typing_section]
 
     def add_submodule_import(
@@ -135,9 +121,7 @@ class FileImport:
         import_type: ImportType,
         typing_section: TypingSection = TypingSection.REGULAR,
         alias: Optional[str] = None,
-        version_modules: Optional[
-            Tuple[Tuple[Tuple[int, int], str, Optional[str]]]
-        ] = None,
+        version_modules: Optional[Tuple[Tuple[Tuple[int, int], str, Optional[str]]]] = None,
     ) -> None:
         """Add an import to this import block."""
         self._append_import(
@@ -174,9 +158,7 @@ class FileImport:
         type_value: str,
         async_type_value: Optional[str] = None,
     ):
-        self.type_definitions[type_name] = TypeDefinition(
-            type_value, async_type_value or type_value
-        )
+        self.type_definitions[type_name] = TypeDefinition(type_value, async_type_value or type_value)
 
     def merge(self, file_import: "FileImport") -> None:
         """Merge the given file import format."""
@@ -269,9 +251,9 @@ class FileImport:
                 name_import = (i.submodule_name, i.alias)
             else:
                 name_import = i.submodule_name
-            retval.setdefault(i.typing_section, {}).setdefault(
-                i.import_type, {}
-            ).setdefault(i.module_name, set()).add(name_import)
+            retval.setdefault(i.typing_section, {}).setdefault(i.import_type, {}).setdefault(i.module_name, set()).add(
+                name_import
+            )
         return retval
 
     def add_msrest_import(
@@ -283,24 +265,16 @@ class FileImport:
     ):
         if self.code_model.options["client_side_validation"]:
             if msrest_import_type == MsrestImportType.Module:
-                self.add_import(
-                    "msrest.serialization", ImportType.SDKCORE, typing_section
-                )
+                self.add_import("msrest.serialization", ImportType.SDKCORE, typing_section)
             else:
-                self.add_submodule_import(
-                    "msrest", "Serializer", ImportType.THIRDPARTY, typing_section
-                )
+                self.add_submodule_import("msrest", "Serializer", ImportType.THIRDPARTY, typing_section)
                 if msrest_import_type == MsrestImportType.SerializerDeserializer:
-                    self.add_submodule_import(
-                        "msrest", "Deserializer", ImportType.THIRDPARTY, typing_section
-                    )
+                    self.add_submodule_import("msrest", "Deserializer", ImportType.THIRDPARTY, typing_section)
         else:
            if self.code_model.options["multiapi"]:
                relative_path += "."
            if msrest_import_type == MsrestImportType.Module:
-                self.add_submodule_import(
-                    relative_path, "_serialization", ImportType.LOCAL, typing_section
-                )
+                self.add_submodule_import(relative_path, "_serialization", ImportType.LOCAL, typing_section)
            else:
                self.add_submodule_import(
                    f"{relative_path}_serialization",

package/autorest/codegen/models/list_type.py

@@ -69,19 +69,13 @@ class ListType(BaseType):
         return ", ".join(attrs_list)
 
     def docstring_type(self, **kwargs: Any) -> str:
-        if (
-            self.code_model.options["version_tolerant"]
-            and self.element_type.xml_metadata
-        ):
+        if self.code_model.options["version_tolerant"] and self.element_type.xml_metadata:
             # this means we're version tolerant XML, we just return the XML element
             return self.element_type.docstring_type(**kwargs)
         return f"list[{self.element_type.docstring_type(**kwargs)}]"
 
     def docstring_text(self, **kwargs: Any) -> str:
-        if (
-            self.code_model.options["version_tolerant"]
-            and self.element_type.xml_metadata
-        ):
+        if self.code_model.options["version_tolerant"] and self.element_type.xml_metadata:
             # this means we're version tolerant XML, we just return the XML element
             return self.element_type.docstring_text(**kwargs)
         return f"list of {self.element_type.docstring_text(**kwargs)}"
@@ -118,13 +112,9 @@ class ListType(BaseType):
 
         if isinstance(self.element_type, ModelType):
             is_polymorphic_subtype = (
-                self.element_type.discriminator_value
-                and not self.element_type.discriminated_subtypes
+                self.element_type.discriminator_value and not self.element_type.discriminated_subtypes
             )
-            if (
-                self.element_type.name not in (m.name for m in polymorphic_subtypes)
-                and is_polymorphic_subtype
-            ):
+            if self.element_type.name not in (m.name for m in polymorphic_subtypes) and is_polymorphic_subtype:
                 polymorphic_subtypes.append(self.element_type)
 
     @property
@@ -132,17 +122,13 @@ class ListType(BaseType):
         return "isinstance({}, list)"
 
     @classmethod
-    def from_yaml(
-        cls, yaml_data: Dict[str, Any], code_model: "CodeModel"
-    ) -> "ListType":
+    def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "ListType":
         from . import build_type
 
         return cls(
             yaml_data=yaml_data,
             code_model=code_model,
-            element_type=build_type(
-                yaml_data=yaml_data["elementType"], code_model=code_model
-            ),
+            element_type=build_type(yaml_data=yaml_data["elementType"], code_model=code_model),
         )
 
     def imports(self, **kwargs: Any) -> FileImport:
@@ -152,9 +138,7 @@ class ListType(BaseType):
             and self.element_type.is_xml
             and not self.code_model.options["models_mode"]
         ):
-            file_import.add_submodule_import(
-                "typing", "List", ImportType.STDLIB, TypingSection.CONDITIONAL
-            )
+            file_import.add_submodule_import("typing", "List", ImportType.STDLIB, TypingSection.CONDITIONAL)
         file_import.merge(self.element_type.imports(**kwargs))
         return file_import
 