@autorest/python 6.1.1 → 6.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/ChangeLog.md CHANGED
@@ -1,5 +1,57 @@
1
1
  # Release History
2
2
 
3
+ ### 2022-08-31 - 6.1.4
4
+
5
+ | Library | Min Version |
6
+ | ----------------------------------------------------------------------- | ----------- |
7
+ | `@autorest/core` | `3.8.4` |
8
+ | `@autorest/modelerfour` | `4.23.5` |
9
+ | `azure-core` dep of generated code | `1.24.0` |
10
+ | `isodate` dep of generated code | `0.6.1` |
11
+ | `msrest` dep of generated code (If generating legacy code) | `0.7.1` |
12
+ | `azure-mgmt-core` dep of generated code (If generating mgmt plane code) | `1.3.2` |
13
+
14
+ **Bug Fixes**
15
+
16
+ - Fix generation failure for `format: password` #1404
17
+ - Fix `content_type` error when paging with body #1407
18
+ - Fix excessive warning level logging in vendored `failsafe_deserialize` #1419
19
+
20
+ **Other Changes**
21
+
22
+ - Upgrade min dependency for `azure-mgmt-core` to `1.3.2` #1404
23
+
24
+ ### 2022-08-22 - 6.1.3
25
+
26
+ | Library | Min Version |
27
+ | ----------------------------------------------------------------------- | ----------- |
28
+ | `@autorest/core` | `3.8.4` |
29
+ | `@autorest/modelerfour` | `4.23.5` |
30
+ | `azure-core` dep of generated code | `1.24.0` |
31
+ | `isodate` dep of generated code | `0.6.1` |
32
+ | `msrest` dep of generated code (If generating legacy code) | `0.7.1` |
33
+ | `azure-mgmt-core` dep of generated code (If generating mgmt plane code) | `1.3.2` |
34
+
35
+ **Bug Fixes**
36
+
37
+ - Fix circular recursion for lropaging #1400
38
+
39
+ ### 2022-08-16 - 6.1.2
40
+
41
+ | Library | Min Version |
42
+ | ----------------------------------------------------------------------- | ----------- |
43
+ | `@autorest/core` | `3.8.1` |
44
+ | `@autorest/modelerfour` | `4.23.5` |
45
+ | `azure-core` dep of generated code | `1.24.0` |
46
+ | `isodate` dep of generated code | `0.6.1` |
47
+ | `msrest` dep of generated code (If generating legacy code) | `0.7.1` |
48
+ | `azure-mgmt-core` dep of generated code (If generating mgmt plane code) | `1.3.0` |
49
+
50
+ **Bug Fixes**
51
+
52
+ - Correctly document polymorphic page responses #1389
53
+ - Add `__version__` to `__init__.py` for multiapi #1393
54
+
3
55
  ### 2022-07-20 - 6.1.1
4
56
 
5
57
  | Library | Min Version |
package/README.md CHANGED
@@ -1,4 +1,4 @@
1
- # Generating with Autorest for Python v5.0.0
1
+ # Generating with Autorest for Python
2
2
 
3
3
  See [here](https://github.com/Azure/autorest.python/wiki/Generating-with-autorest-for-python-v5.0.0) for Python-specific docs, and [here] for general docs
4
4
 
@@ -5,6 +5,7 @@
5
5
  # --------------------------------------------------------------------------
6
6
  import logging
7
7
  from pathlib import Path
8
+ import json
8
9
  from abc import ABC, abstractmethod
9
10
  from typing import Any, Dict, Union
10
11
 
@@ -21,7 +22,19 @@ _LOGGER = logging.getLogger(__name__)
21
22
  class ReaderAndWriter:
22
23
  def __init__(self, *, output_folder: Union[str, Path], **kwargs: Any) -> None:
23
24
  self.output_folder = Path(output_folder)
25
+ try:
26
+ with open(
27
+ Path(self.output_folder) / Path("..") / Path("python.json"), "r"
28
+ ) as fd:
29
+ python_json = json.load(fd)
30
+ except Exception: # pylint: disable=broad-except
31
+ python_json = {}
24
32
  self.options = kwargs
33
+ if python_json:
34
+ _LOGGER.warning(
35
+ "Loading python.json file. This behavior will be deprecated"
36
+ )
37
+ self.options.update(python_json)
25
38
 
26
39
  def read_file(self, path: Union[str, Path]) -> str:
27
40
  """How does one read a file in cadl?"""
@@ -3,6 +3,7 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
+ from typing import Any, Dict
6
7
  import re
7
8
  import argparse
8
9
 
@@ -52,5 +53,29 @@ def parse_args(need_cadl_file: bool = True):
52
53
  help="Serialized cadl file",
53
54
  required=need_cadl_file,
54
55
  )
55
-
56
+ parser.add_argument(
57
+ "--debug",
58
+ dest="debug",
59
+ help="Debug mode",
60
+ required=False,
61
+ action="store_true",
62
+ )
56
63
  return parser.parse_args()
64
+
65
+
66
+ def get_body_type_for_description(body_parameter: Dict[str, Any]) -> str:
67
+ if body_parameter["type"]["type"] == "binary":
68
+ return "binary"
69
+ if body_parameter["type"]["type"] == "string":
70
+ return "string"
71
+ return "JSON"
72
+
73
+
74
+ # used if we want to get a string / binary type etc
75
+ KNOWN_TYPES: Dict[str, Dict[str, Any]] = {
76
+ "string": {"type": "string"},
77
+ "binary": {"type": "binary"},
78
+ "anydict": {"type": "dict", "elementType": {"type": "any"}},
79
+ }
80
+
81
+ JSON_REGEXP = re.compile(r"^(application|text)/(.+\+)?json$")
@@ -0,0 +1,128 @@
1
+ # -------------------------------------------------------------------------
2
+ # Copyright (c) Microsoft Corporation. All rights reserved.
3
+ # Licensed under the MIT License. See License.txt in the project root for
4
+ # license information.
5
+ # --------------------------------------------------------------------------
6
+ import logging
7
+ from typing import Any, Dict, List
8
+ from .. import YamlUpdatePlugin
9
+ from .._utils import parse_args
10
+
11
+ _LOGGER = logging.getLogger(__name__)
12
+
13
+ OAUTH_TYPE = "OAuth2"
14
+ KEY_TYPE = "Key"
15
+
16
+
17
+ def get_azure_key_credential(key: str) -> Dict[str, Any]:
18
+ return {
19
+ "type": KEY_TYPE,
20
+ "policy": {"type": "AzureKeyCredentialPolicy", "key": key},
21
+ }
22
+
23
+
24
+ class CadlFlags(YamlUpdatePlugin): # pylint: disable=abstract-method
25
+ """A plugin to apply flags from backdoor python.json into cadl yaml file"""
26
+
27
+ def update_yaml(self, yaml_data: Dict[str, Any]) -> None:
28
+ """Convert in place the YAML str."""
29
+ if self.options.get("add-credential"):
30
+ self.update_credential(yaml_data)
31
+ if self.options.get("namespace"):
32
+ yaml_data["client"]["namespace"] = self.options["namespace"]
33
+ if self.options.get("title"):
34
+ yaml_data["client"]["name"] = self.options["title"]
35
+
36
+ def get_credential_scopes_from_flags(self, auth_policy: str) -> List[str]:
37
+ if self.options.get("azure-arm"):
38
+ return ["https://management.azure.com/.default"]
39
+ credential_scopes_temp = self.options.get("credential-scopes")
40
+ credential_scopes = (
41
+ credential_scopes_temp.split(",") if credential_scopes_temp else None
42
+ )
43
+ if self.options.get("credential-scopes", False) and not credential_scopes:
44
+ raise ValueError(
45
+ "--credential-scopes takes a list of scopes in comma separated format. "
46
+ "For example: --credential-scopes=https://cognitiveservices.azure.com/.default"
47
+ )
48
+ if not credential_scopes:
49
+ _LOGGER.warning(
50
+ "You have default credential policy %s "
51
+ "but not the --credential-scopes flag set while generating non-management plane code. "
52
+ "This is not recommended because it forces the customer to pass credential scopes "
53
+ "through kwargs if they want to authenticate.",
54
+ auth_policy,
55
+ )
56
+ credential_scopes = []
57
+ return credential_scopes
58
+
59
+ def get_token_credential(self, credential_scopes: List[str]) -> Dict[str, Any]:
60
+ return {
61
+ "type": OAUTH_TYPE,
62
+ "policy": {
63
+ "type": "ARMChallengeAuthenticationPolicy"
64
+ if self.options.get("azure-arm")
65
+ else "BearerTokenCredentialPolicy",
66
+ "credentialScopes": credential_scopes,
67
+ },
68
+ }
69
+
70
+ def update_credential_from_flags(self) -> Dict[str, Any]:
71
+ default_auth_policy = (
72
+ "ARMChallengeAuthenticationPolicy"
73
+ if self.options.get("azure-arm")
74
+ else "BearerTokenCredentialPolicy"
75
+ )
76
+ auth_policy = (
77
+ self.options.get("credential-default-policy-type") or default_auth_policy
78
+ )
79
+ credential_scopes = self.get_credential_scopes_from_flags(auth_policy)
80
+ key = self.options.get("credential-key-header-name")
81
+ if auth_policy.lower() in (
82
+ "armchallengeauthenticationpolicy",
83
+ "bearertokencredentialpolicy",
84
+ ):
85
+ if key:
86
+ raise ValueError(
87
+ "You have passed in a credential key header name with default credential policy type "
88
+ f"{auth_policy}. This is not allowed, since credential key header "
89
+ "name is tied with AzureKeyCredentialPolicy. Instead, with this policy it is recommended you "
90
+ "pass in --credential-scopes."
91
+ )
92
+ return self.get_token_credential(credential_scopes)
93
+ # Otherwise you have AzureKeyCredentialPolicy
94
+ if self.options.get("credential-scopes"):
95
+ raise ValueError(
96
+ "You have passed in credential scopes with default credential policy type "
97
+ "AzureKeyCredentialPolicy. This is not allowed, since credential scopes is tied with "
98
+ f"{default_auth_policy}. Instead, with this policy "
99
+ "you must pass in --credential-key-header-name."
100
+ )
101
+ if not key:
102
+ key = "api-key"
103
+ _LOGGER.info(
104
+ "Defaulting the AzureKeyCredentialPolicy header's name to 'api-key'"
105
+ )
106
+ return get_azure_key_credential(key)
107
+
108
+ def update_credential(self, yaml_data: Dict[str, Any]) -> None:
109
+ credential_type = self.update_credential_from_flags()
110
+ yaml_data["types"].append(credential_type)
111
+ credential = {
112
+ "type": credential_type,
113
+ "optional": False,
114
+ "description": "Credential needed for the client to connect to Azure.",
115
+ "clientName": "credential",
116
+ "location": "other",
117
+ "restApiName": "credential",
118
+ "implementation": "Client",
119
+ "skipUrlEncoding": True,
120
+ "inOverload": False,
121
+ }
122
+ yaml_data["client"]["parameters"].append(credential)
123
+
124
+
125
+ if __name__ == "__main__":
126
+ # CADL pipeline will call this
127
+ args = parse_args()
128
+ CadlFlags(output_folder=args.output_folder, cadl_file=args.cadl_file).process()
@@ -122,7 +122,7 @@ class CodeGenerator(Plugin):
122
122
  @staticmethod
123
123
  def _build_package_dependency() -> Dict[str, str]:
124
124
  return {
125
- "dependency_azure_mgmt_core": "azure-mgmt-core<2.0.0,>=1.3.0",
125
+ "dependency_azure_mgmt_core": "azure-mgmt-core<2.0.0,>=1.3.2",
126
126
  "dependency_azure_core": "azure-core<2.0.0,>=1.24.0",
127
127
  "dependency_msrest": "msrest>=0.7.1",
128
128
  }
@@ -5,10 +5,8 @@
5
5
  # --------------------------------------------------------------------------
6
6
 
7
7
  DEFAULT_HEADER_TEXT = (
8
- "# --------------------------------------------------------------------------\n"
9
- "# Copyright (c) Microsoft Corporation. All rights reserved.\n"
10
- "# Licensed under the MIT License. See License.txt in the project root for license information.\n"
11
- "# Code generated by Microsoft (R) Python Code Generator.\n"
12
- "# Changes may cause incorrect behavior and will be lost if the code is regenerated.\n"
13
- "# --------------------------------------------------------------------------"
8
+ "Copyright (c) Microsoft Corporation. All rights reserved.\n"
9
+ "Licensed under the MIT License. See License.txt in the project root for license information.\n"
10
+ "Code generated by Microsoft (R) Python Code Generator.\n"
11
+ "Changes may cause incorrect behavior and will be lost if the code is regenerated."
14
12
  )
@@ -140,6 +140,7 @@ TYPE_TO_OBJECT = {
140
140
  "AzureKeyCredentialPolicy": AzureKeyCredentialPolicyType,
141
141
  "any-object": AnyObjectType,
142
142
  "unixtime": UnixTimeType,
143
+ "credential": StringType,
143
144
  }
144
145
  _LOGGER = logging.getLogger(__name__)
145
146
 
@@ -10,6 +10,7 @@ from .base_type import BaseType
10
10
 
11
11
  if TYPE_CHECKING:
12
12
  from .code_model import CodeModel
13
+ from .model_type import ModelType
13
14
 
14
15
 
15
16
  class CombinedType(BaseType):
@@ -82,6 +83,9 @@ class CombinedType(BaseType):
82
83
  "You shouldn't get a JSON template representation of multiple types"
83
84
  )
84
85
 
86
+ def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None:
87
+ raise ValueError("You shouldn't get polymorphic subtypes of multiple types")
88
+
85
89
  @property
86
90
  def instance_check_template(self) -> str:
87
91
  """Template of what an instance check of a variable for this type would look like"""
@@ -61,6 +61,7 @@ class ModelType(BaseType): # pylint: disable=too-many-instance-attributes
61
61
  "discriminatorValue"
62
62
  )
63
63
  self._created_json_template_representation = False
64
+ self._got_polymorphic_subtypes = False
64
65
  self.is_public: bool = self.yaml_data.get("isPublic", True)
65
66
  self.snake_case_name: str = self.yaml_data["snakeCaseName"]
66
67
 
@@ -71,16 +72,20 @@ class ModelType(BaseType): # pylint: disable=too-many-instance-attributes
71
72
  @property
72
73
  def serialization_type(self) -> str:
73
74
  if self.code_model.options["models_mode"]:
74
- return self.name
75
+ return (
76
+ self.name
77
+ if self.is_public
78
+ else f"{self.code_model.models_filename}.{self.name}"
79
+ )
75
80
  return "object"
76
81
 
77
82
  def type_annotation(self, **kwargs: Any) -> str:
78
83
  if self.code_model.options["models_mode"]:
79
84
  is_operation_file = kwargs.pop("is_operation_file", False)
80
- if self.is_public:
81
- retval = f"_models.{self.name}"
82
- return retval if is_operation_file else f'"{retval}"'
83
- return self.name if is_operation_file else f'"{self.name}"'
85
+ retval = f"_models.{self.name}"
86
+ if not self.is_public:
87
+ retval = f"{self.code_model.models_filename}.{retval}"
88
+ return retval if is_operation_file else f'"{retval}"'
84
89
  return "ET.Element" if self.is_xml else "JSON"
85
90
 
86
91
  def docstring_type(self, **kwargs: Any) -> str:
@@ -167,9 +172,13 @@ class ModelType(BaseType): # pylint: disable=too-many-instance-attributes
167
172
  )
168
173
 
169
174
  def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None:
175
+
170
176
  is_polymorphic_subtype = (
171
177
  self.discriminator_value and not self.discriminated_subtypes
172
178
  )
179
+ if self._got_polymorphic_subtypes:
180
+ return
181
+ self._got_polymorphic_subtypes = True
173
182
  if (
174
183
  self.name not in (m.name for m in polymorphic_subtypes)
175
184
  and is_polymorphic_subtype
@@ -179,6 +188,7 @@ class ModelType(BaseType): # pylint: disable=too-many-instance-attributes
179
188
  discriminated_subtype.get_polymorphic_subtypes(polymorphic_subtypes)
180
189
  for property in self.properties:
181
190
  property.get_polymorphic_subtypes(polymorphic_subtypes)
191
+ self._got_polymorphic_subtypes = False
182
192
 
183
193
  @classmethod
184
194
  def from_yaml(
@@ -244,16 +254,9 @@ class ModelType(BaseType): # pylint: disable=too-many-instance-attributes
244
254
  relative_path = kwargs.pop("relative_path", None)
245
255
  if self.code_model.options["models_mode"] and relative_path:
246
256
  # add import for models in operations file
247
- if self.is_public:
248
- file_import.add_submodule_import(
249
- relative_path, "models", ImportType.LOCAL, alias="_models"
250
- )
251
- else:
252
- file_import.add_submodule_import(
253
- f"{relative_path}models.{self.code_model.models_filename}",
254
- self.name,
255
- ImportType.LOCAL,
256
- )
257
+ file_import.add_submodule_import(
258
+ relative_path, "models", ImportType.LOCAL, alias="_models"
259
+ )
257
260
  if self.code_model.options["models_mode"]:
258
261
  return file_import
259
262
  file_import.add_submodule_import(
@@ -306,6 +306,10 @@ class Parameter(_ParameterBase):
306
306
  def xml_serialization_ctxt(self) -> str:
307
307
  return self.type.xml_serialization_ctxt or ""
308
308
 
309
+ @property
310
+ def is_content_type(self) -> bool:
311
+ return bool(self.rest_api_name) and self.rest_api_name.lower() == "content-type"
312
+
309
313
  @property
310
314
  def method_location(self) -> ParameterMethodLocation:
311
315
  if not self.in_method_signature:
@@ -314,7 +318,7 @@ class Parameter(_ParameterBase):
314
318
  return ParameterMethodLocation.POSITIONAL
315
319
  if self.constant:
316
320
  return ParameterMethodLocation.KWARG
317
- if self.rest_api_name == "Content-Type":
321
+ if self.is_content_type:
318
322
  if self.in_overload:
319
323
  return ParameterMethodLocation.KEYWORD_ONLY
320
324
  return ParameterMethodLocation.KWARG
@@ -110,14 +110,7 @@ class Property(BaseModel): # pylint: disable=too-many-instance-attributes
110
110
  from .model_type import ModelType
111
111
 
112
112
  if isinstance(self.type, ModelType):
113
- is_polymorphic_subtype = (
114
- self.type.discriminator_value and not self.type.discriminated_subtypes
115
- )
116
- if (
117
- self.type.name not in (m.name for m in polymorphic_subtypes)
118
- and is_polymorphic_subtype
119
- ):
120
- polymorphic_subtypes.append(self.type)
113
+ self.type.get_polymorphic_subtypes(polymorphic_subtypes)
121
114
 
122
115
  @property
123
116
  def validation(self) -> Optional[Dict[str, Any]]:
@@ -95,7 +95,7 @@ class RequestBuilderParameter(Parameter):
95
95
  ) -> None:
96
96
  super().__init__(yaml_data, code_model, type)
97
97
  # we don't want any default content type behavior in request builder
98
- if self.rest_api_name == "Content-Type":
98
+ if self.is_content_type:
99
99
  self.client_default_value = None
100
100
  if self.grouped_by and self.client_name[0] == "_":
101
101
  # we don't want hidden parameters for grouped by in request builders
@@ -55,6 +55,10 @@ class Response(BaseModel):
55
55
  self.type = type
56
56
  self.nullable = yaml_data.get("nullable")
57
57
 
58
+ def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None:
59
+ if self.type:
60
+ self.type.get_polymorphic_subtypes(polymorphic_subtypes)
61
+
58
62
  def get_json_template_representation(self) -> Any:
59
63
  if not self.type:
60
64
  return None
@@ -137,6 +141,9 @@ class PagingResponse(Response):
137
141
  super().__init__(*args, **kwargs)
138
142
  self.item_type = self.code_model.lookup_type(id(self.yaml_data["itemType"]))
139
143
 
144
+ def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None:
145
+ return self.item_type.get_polymorphic_subtypes(polymorphic_subtypes)
146
+
140
147
  def get_json_template_representation(self) -> Any:
141
148
  return self.item_type.get_json_template_representation()
142
149
 
@@ -3,11 +3,8 @@
3
3
  # Licensed under the MIT License. See License.txt in the project root for
4
4
  # license information.
5
5
  # --------------------------------------------------------------------------
6
- import re
7
6
  from typing import TypeVar, Dict
8
7
 
9
- JSON_REGEXP = re.compile(r"^(application|text)/(.+\+)?json$")
10
-
11
8
  T = TypeVar("T")
12
9
  OrderedSet = Dict[T, None]
13
10
 
@@ -531,7 +531,7 @@ class _OperationSerializer(
531
531
  polymorphic_subtypes: List[ModelType] = []
532
532
  if not response.type:
533
533
  continue
534
- response.type.get_polymorphic_subtypes(polymorphic_subtypes)
534
+ response.get_polymorphic_subtypes(polymorphic_subtypes)
535
535
  if polymorphic_subtypes:
536
536
  # we just assume one kind of polymorphic body for input
537
537
  discriminator_name = cast(
@@ -691,7 +691,9 @@ class _OperationSerializer(
691
691
  if (
692
692
  not body_param.default_content_type
693
693
  and not next(
694
- p for p in builder.parameters if p.rest_api_name == "Content-Type"
694
+ p
695
+ for p in builder.parameters
696
+ if p.rest_api_name.lower() == "content-type"
695
697
  ).optional
696
698
  ):
697
699
  content_types = "'" + "', '".join(body_param.content_types) + "'"
@@ -706,8 +708,13 @@ class _OperationSerializer(
706
708
  retval.extend(self._serialize_body_parameter(builder))
707
709
  return retval
708
710
 
709
- def _initialize_overloads(self, builder: OperationType) -> List[str]:
711
+ def _initialize_overloads(
712
+ self, builder: OperationType, is_paging: bool = False
713
+ ) -> List[str]:
710
714
  retval: List[str] = []
715
+ # For paging, we put body parameter in local place outside `prepare_request`
716
+ if is_paging:
717
+ return retval
711
718
  same_content_type = (
712
719
  len(
713
720
  set(
@@ -881,6 +888,7 @@ class _OperationSerializer(
881
888
  request_builder: RequestBuilderType,
882
889
  template_url: Optional[str] = None,
883
890
  is_next_request: bool = False,
891
+ is_paging: bool = False,
884
892
  ) -> List[str]:
885
893
  retval = []
886
894
  if builder.parameters.grouped:
@@ -891,7 +899,7 @@ class _OperationSerializer(
891
899
  retval.extend(_serialize_flattened_body(builder.parameters.body_parameter))
892
900
  if builder.overloads:
893
901
  # we are only dealing with two overloads. If there are three, we generate an abstract operation
894
- retval.extend(self._initialize_overloads(builder))
902
+ retval.extend(self._initialize_overloads(builder, is_paging=is_paging))
895
903
  elif builder.parameters.has_body:
896
904
  # non-overloaded body
897
905
  retval.extend(self._create_body_parameter(builder))
@@ -904,8 +912,12 @@ class _OperationSerializer(
904
912
  retval.extend(self._postprocess_http_request(builder, template_url))
905
913
  return retval
906
914
 
907
- def call_request_builder(self, builder: OperationType) -> List[str]:
908
- return self._call_request_builder_helper(builder, builder.request_builder)
915
+ def call_request_builder(
916
+ self, builder: OperationType, is_paging: bool = False
917
+ ) -> List[str]:
918
+ return self._call_request_builder_helper(
919
+ builder, builder.request_builder, is_paging=is_paging
920
+ )
909
921
 
910
922
  def response_headers_and_deserialization(
911
923
  self,
@@ -1114,6 +1126,8 @@ class _PagingOperationSerializer(
1114
1126
  def decorators(self, builder: PagingOperationType) -> List[str]:
1115
1127
  """Decorators for the method"""
1116
1128
  retval: List[str] = []
1129
+ if builder.is_overload:
1130
+ return ["@overload"]
1117
1131
  if self.code_model.options["tracing"] and builder.want_tracing:
1118
1132
  retval.append("@distributed_trace")
1119
1133
  return retval
@@ -1166,10 +1180,14 @@ class _PagingOperationSerializer(
1166
1180
  return retval
1167
1181
 
1168
1182
  def _prepare_request_callback(self, builder: PagingOperationType) -> List[str]:
1169
- retval = ["def prepare_request(next_link=None):"]
1183
+ retval = self._initialize_overloads(builder)
1184
+ retval.append("def prepare_request(next_link=None):")
1170
1185
  retval.append(" if not next_link:")
1171
1186
  retval.extend(
1172
- [f" {line}" for line in self.call_request_builder(builder)]
1187
+ [
1188
+ f" {line}"
1189
+ for line in self.call_request_builder(builder, is_paging=True)
1190
+ ]
1173
1191
  )
1174
1192
  retval.append("")
1175
1193
  retval.append(" else:")
@@ -1,5 +1,8 @@
1
1
  {% import 'operation_tools.jinja2' as op_tools with context %}
2
2
  {# actual template starts here #}
3
+ {% if operation.overloads and operation.public %}
4
+ {{ op_tools.generate_overloads(operation_serializer, operation) }}
5
+ {% endif %}
3
6
  {{ operation_serializer.method_signature_and_response_type_annotation(operation) }}
4
7
  {% if operation.public %}
5
8
  {{ op_tools.description(operation, operation_serializer) | indent }}{% endif %}
@@ -18,4 +21,4 @@
18
21
  {% endif %}
19
22
  {% if not code_model.options["version_tolerant"] %}
20
23
  {{ operation_serializer.get_metadata_url(operation) -}}
21
- {% endif %}
24
+ {% endif %}
@@ -1524,7 +1524,7 @@ class Deserializer(object):
1524
1524
  try:
1525
1525
  return self(target_obj, data, content_type=content_type)
1526
1526
  except:
1527
- _LOGGER.warning(
1527
+ _LOGGER.debug(
1528
1528
  "Ran into a deserialization error. Ignoring since this is failsafe deserialization",
1529
1529
  exc_info=True
1530
1530
  )
@@ -2003,4 +2003,4 @@ class Deserializer(object):
2003
2003
  msg = "Cannot deserialize to unix datetime object."
2004
2004
  raise_with_traceback(DeserializationError, msg, err)
2005
2005
  else:
2006
- return date_obj
2006
+ return date_obj
@@ -6,28 +6,25 @@
6
6
  # --------------------------------------------------------------------------
7
7
  """The modelerfour reformatter autorest plugin.
8
8
  """
9
- import re
10
9
  import copy
11
10
  import logging
12
11
  from typing import Callable, Dict, Any, Iterable, List, Optional, Set
13
12
 
14
- from .._utils import to_snake_case
13
+ from .._utils import (
14
+ to_snake_case,
15
+ KNOWN_TYPES,
16
+ get_body_type_for_description,
17
+ JSON_REGEXP,
18
+ )
15
19
  from .. import YamlUpdatePluginAutorest
16
20
 
17
- JSON_REGEXP = re.compile(r"^(application|text)/(.+\+)?json$")
21
+
18
22
  ORIGINAL_ID_TO_UPDATED_TYPE: Dict[int, Dict[str, Any]] = {}
19
23
  OAUTH_TYPE = "OAuth2"
20
24
  KEY_TYPE = "Key"
21
25
 
22
26
  _LOGGER = logging.getLogger(__name__)
23
27
 
24
- # used if we want to get a string / binary type etc
25
- KNOWN_TYPES: Dict[str, Dict[str, Any]] = {
26
- "string": {"type": "string"},
27
- "binary": {"type": "binary"},
28
- "anydict": {"type": "dict", "elementType": {"type": "any"}},
29
- }
30
-
31
28
 
32
29
  def is_body(yaml_data: Dict[str, Any]) -> bool:
33
30
  """Return true if passed in parameter is a body param"""
@@ -329,14 +326,6 @@ def get_all_body_types(yaml_data: Dict[str, Any]) -> List[Dict[str, Any]]:
329
326
  return list(seen_body_types.values())
330
327
 
331
328
 
332
- def get_body_type_for_description(body_parameter: Dict[str, Any]) -> str:
333
- if body_parameter["type"]["type"] == "binary":
334
- return "binary"
335
- if body_parameter["type"]["type"] == "string":
336
- return "string"
337
- return "JSON"
338
-
339
-
340
329
  def add_lro_information(operation: Dict[str, Any], yaml_data: Dict[str, Any]) -> None:
341
330
  operation["discriminator"] = "lro"
342
331
  extensions = yaml_data["extensions"]
@@ -479,7 +468,7 @@ class M4Reformatter(
479
468
  group_name, yaml_data, body_type, content_types=content_types
480
469
  )
481
470
  for parameter in overload["parameters"]:
482
- if parameter["restApiName"] == "Content-Type":
471
+ if parameter["restApiName"].lower() == "content-type":
483
472
  parameter["clientDefaultValue"] = overload["bodyParameter"][
484
473
  "defaultContentType"
485
474
  ]
@@ -557,23 +546,6 @@ class M4Reformatter(
557
546
  else None
558
547
  )
559
548
  content_types = None
560
- if ( # pylint: disable=too-many-boolean-expressions
561
- body_parameter
562
- and body_parameter["type"]["type"] != "combined"
563
- and yaml_data.get("requestMediaTypes")
564
- and any(
565
- ct for ct in yaml_data["requestMediaTypes"] if JSON_REGEXP.match(ct)
566
- )
567
- and body_parameter["type"]["type"] in ("model", "dict", "list")
568
- and not body_parameter["type"]["xmlMetadata"]
569
- and not body_parameter.get("flattened")
570
- and not body_parameter.get("groupedBy")
571
- ):
572
- combined_type = update_types(
573
- [body_parameter["type"], KNOWN_TYPES["binary"]]
574
- )
575
- body_parameter["type"] = combined_type
576
- content_types = body_parameter["contentTypes"]
577
549
  operation = self._update_operation_helper(group_name, yaml_data, body_parameter)
578
550
  operation["overloads"] = self.update_overloads(
579
551
  group_name, yaml_data, body_parameter, content_types=content_types
@@ -843,7 +815,10 @@ class M4Reformatter(
843
815
  continue
844
816
  if is_body(param):
845
817
  continue
846
- if param["language"]["default"].get("serializedName") == "Content-Type":
818
+ if (
819
+ param["language"]["default"].get("serializedName").lower()
820
+ == "content-type"
821
+ ):
847
822
  param = self._update_content_type_parameter(
848
823
  param,
849
824
  body_parameter,
@@ -15,4 +15,8 @@ try:
15
15
  patch_sdk()
16
16
  except ImportError:
17
17
  pass
18
+
19
+ from ._version import VERSION
20
+
21
+ __version__ = VERSION
18
22
  {% endif %}
@@ -5,13 +5,107 @@
5
5
  # --------------------------------------------------------------------------
6
6
  """The preprocessing autorest plugin.
7
7
  """
8
+ import copy
8
9
  from typing import Callable, Dict, Any, List, Optional
10
+
9
11
  from .._utils import to_snake_case
10
12
  from .helpers import pad_reserved_words, add_redefined_builtin_info
11
13
  from .python_mappings import PadType
12
14
 
13
15
  from .. import YamlUpdatePlugin, YamlUpdatePluginAutorest
14
- from .._utils import parse_args
16
+ from .._utils import parse_args, get_body_type_for_description, JSON_REGEXP, KNOWN_TYPES
17
+
18
+
19
+ def add_body_param_type(code_model: Dict[str, Any], body_parameter: Dict[str, Any]):
20
+ if (
21
+ body_parameter
22
+ and body_parameter["type"]["type"] in ("model", "dict", "list")
23
+ and any(
24
+ ct for ct in body_parameter.get("contentTypes", []) if JSON_REGEXP.match(ct)
25
+ )
26
+ and not body_parameter["type"].get("xmlMetadata")
27
+ and not any(t for t in ["flattened", "groupedBy"] if body_parameter.get(t))
28
+ ):
29
+ body_parameter["type"] = {
30
+ "type": "combined",
31
+ "types": [body_parameter["type"], KNOWN_TYPES["binary"]],
32
+ }
33
+ code_model["types"].append(body_parameter["type"])
34
+
35
+
36
+ def update_overload_section(
37
+ overload: Dict[str, Any],
38
+ yaml_data: Dict[str, Any],
39
+ section: str,
40
+ ):
41
+ for overload_s, original_s in zip(overload[section], yaml_data[section]):
42
+ if overload_s.get("type"):
43
+ overload_s["type"] = original_s["type"]
44
+ if overload_s.get("headers"):
45
+ for overload_h, original_h in zip(
46
+ overload_s["headers"], original_s["headers"]
47
+ ):
48
+ if overload_h.get("type"):
49
+ overload_h["type"] = original_h["type"]
50
+
51
+
52
+ def add_overload(yaml_data: Dict[str, Any], body_type: Dict[str, Any]):
53
+ overload = copy.deepcopy(yaml_data)
54
+ overload["isOverload"] = True
55
+ overload["bodyParameter"]["type"] = body_type
56
+
57
+ overload["overloads"] = []
58
+
59
+ # for yaml sync, we need to make sure all of the responses, parameters, and exceptions' types have the same yaml id
60
+ for overload_p, original_p in zip(overload["parameters"], yaml_data["parameters"]):
61
+ overload_p["type"] = original_p["type"]
62
+ update_overload_section(overload, yaml_data, "responses")
63
+ update_overload_section(overload, yaml_data, "exceptions")
64
+
65
+ # update content type to be an overloads content type
66
+ content_type_param = next(
67
+ p for p in overload["parameters"] if p["restApiName"].lower() == "content-type"
68
+ )
69
+ content_type_param["inOverload"] = True
70
+ content_type_param["inDocstring"] = True
71
+ body_type_description = get_body_type_for_description(overload["bodyParameter"])
72
+ content_type_param[
73
+ "description"
74
+ ] = f"Body Parameter content-type. Content type parameter for {body_type_description} body."
75
+ content_types = yaml_data["bodyParameter"]["contentTypes"]
76
+ if body_type["type"] == "binary" and len(content_types) > 1:
77
+ content_types = "'" + "', '".join(content_types) + "'"
78
+ content_type_param["description"] += f" Known values are: {content_types}."
79
+ return overload
80
+
81
+
82
+ def add_overloads_for_body_param(yaml_data: Dict[str, Any]) -> None:
83
+ """If we added a body parameter type, add overloads for that type"""
84
+ body_parameter = yaml_data["bodyParameter"]
85
+ if not (
86
+ body_parameter["type"]["type"] == "combined"
87
+ and len(yaml_data["bodyParameter"]["type"]["types"])
88
+ > len(yaml_data["overloads"])
89
+ ):
90
+ return
91
+ for body_type in body_parameter["type"]["types"]:
92
+ if any(
93
+ o
94
+ for o in yaml_data["overloads"]
95
+ if id(o["bodyParameter"]["type"]) == id(body_type)
96
+ ):
97
+ continue
98
+ yaml_data["overloads"].append(add_overload(yaml_data, body_type))
99
+ content_type_param = next(
100
+ p for p in yaml_data["parameters"] if p["restApiName"].lower() == "content-type"
101
+ )
102
+ content_type_param["inOverload"] = False
103
+ content_type_param["inOverriden"] = True
104
+ content_type_param["inDocstring"] = True
105
+ content_type_param[
106
+ "clientDefaultValue"
107
+ ] = None # make it none bc it will be overriden, we depend on default of overloads
108
+ content_type_param["optional"] = True
15
109
 
16
110
 
17
111
  def _remove_paging_maxpagesize(yaml_data: Dict[str, Any]) -> None:
@@ -103,7 +197,7 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
103
197
 
104
198
  def get_operation_updater(
105
199
  self, yaml_data: Dict[str, Any]
106
- ) -> Callable[[Dict[str, Any]], None]:
200
+ ) -> Callable[[Dict[str, Any], Dict[str, Any]], None]:
107
201
  if yaml_data["discriminator"] == "lropaging":
108
202
  return self.update_lro_paging_operation
109
203
  if yaml_data["discriminator"] == "lro":
@@ -112,7 +206,13 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
112
206
  return self.update_paging_operation
113
207
  return self.update_operation
114
208
 
115
- def update_operation(self, yaml_data: Dict[str, Any]) -> None:
209
+ def update_operation(
210
+ self,
211
+ code_model: Dict[str, Any],
212
+ yaml_data: Dict[str, Any],
213
+ *,
214
+ is_overload: bool = False,
215
+ ) -> None:
116
216
  yaml_data["groupName"] = pad_reserved_words(
117
217
  yaml_data["groupName"], PadType.OPERATION_GROUP
118
218
  )
@@ -123,6 +223,7 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
123
223
  yaml_data["description"], yaml_data["name"]
124
224
  )
125
225
  yaml_data["summary"] = update_description(yaml_data.get("summary", ""))
226
+ body_parameter = yaml_data.get("bodyParameter")
126
227
  for parameter in yaml_data["parameters"]:
127
228
  update_parameter(parameter)
128
229
  if yaml_data.get("bodyParameter"):
@@ -130,9 +231,13 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
130
231
  for entry in yaml_data["bodyParameter"].get("entries", []):
131
232
  update_parameter(entry)
132
233
  for overload in yaml_data.get("overloads", []):
133
- self.update_operation(overload)
234
+ self.update_operation(code_model, overload, is_overload=True)
134
235
  for response in yaml_data.get("responses", []):
135
236
  response["discriminator"] = "operation"
237
+ if body_parameter and not is_overload:
238
+ # if we have a JSON body, we add a binary overload
239
+ add_body_param_type(code_model, body_parameter)
240
+ add_overloads_for_body_param(yaml_data)
136
241
 
137
242
  def _update_lro_operation_helper(self, yaml_data: Dict[str, Any]) -> None:
138
243
  azure_arm = self.options.get("azure-arm", False)
@@ -157,23 +262,38 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
157
262
  else "azure.core.polling.async_base_polling.AsyncLROBasePolling"
158
263
  )
159
264
 
160
- def update_lro_paging_operation(self, yaml_data: Dict[str, Any]) -> None:
161
- self.update_lro_operation(yaml_data)
162
- self.update_paging_operation(yaml_data)
265
+ def update_lro_paging_operation(
266
+ self,
267
+ code_model: Dict[str, Any],
268
+ yaml_data: Dict[str, Any],
269
+ is_overload: bool = False,
270
+ ) -> None:
271
+ self.update_lro_operation(code_model, yaml_data, is_overload=is_overload)
272
+ self.update_paging_operation(code_model, yaml_data, is_overload=is_overload)
163
273
  yaml_data["discriminator"] = "lropaging"
164
274
  for response in yaml_data.get("responses", []):
165
275
  response["discriminator"] = "lropaging"
166
276
  for overload in yaml_data.get("overloads", []):
167
- self.update_lro_paging_operation(overload)
277
+ self.update_lro_paging_operation(code_model, overload, is_overload=True)
168
278
 
169
- def update_lro_operation(self, yaml_data: Dict[str, Any]) -> None:
170
- self.update_operation(yaml_data)
279
+ def update_lro_operation(
280
+ self,
281
+ code_model: Dict[str, Any],
282
+ yaml_data: Dict[str, Any],
283
+ is_overload: bool = False,
284
+ ) -> None:
285
+ self.update_operation(code_model, yaml_data, is_overload=is_overload)
171
286
  self._update_lro_operation_helper(yaml_data)
172
287
  for overload in yaml_data.get("overloads", []):
173
288
  self._update_lro_operation_helper(overload)
174
289
 
175
- def update_paging_operation(self, yaml_data: Dict[str, Any]) -> None:
176
- self.update_operation(yaml_data)
290
+ def update_paging_operation(
291
+ self,
292
+ code_model: Dict[str, Any],
293
+ yaml_data: Dict[str, Any],
294
+ is_overload: bool = False,
295
+ ) -> None:
296
+ self.update_operation(code_model, yaml_data, is_overload=is_overload)
177
297
  if not yaml_data.get("pagerSync"):
178
298
  yaml_data["pagerSync"] = "azure.core.paging.ItemPaged"
179
299
  if not yaml_data.get("pagerAsync"):
@@ -208,7 +328,7 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
208
328
  update_paging_response(response)
209
329
  response["itemType"] = item_type
210
330
  for overload in yaml_data.get("overloads", []):
211
- self.update_paging_operation(overload)
331
+ self.update_paging_operation(code_model, overload, is_overload=True)
212
332
 
213
333
  def update_operation_groups(self, yaml_data: Dict[str, Any]) -> None:
214
334
  operation_groups_yaml_data = yaml_data["operationGroups"]
@@ -223,13 +343,13 @@ class PreProcessPlugin(YamlUpdatePlugin): # pylint: disable=abstract-method
223
343
  yaml_data, operation_group["className"]
224
344
  )
225
345
  for operation in operation_group["operations"]:
226
- self.get_operation_updater(operation)(operation)
346
+ self.get_operation_updater(operation)(yaml_data, operation)
227
347
 
228
348
  def update_yaml(self, yaml_data: Dict[str, Any]) -> None:
229
349
  """Convert in place the YAML str."""
230
350
  update_client(yaml_data["client"])
231
- update_types(yaml_data["types"])
232
351
  self.update_operation_groups(yaml_data)
352
+ update_types(yaml_data["types"])
233
353
 
234
354
 
235
355
  class PreProcessPluginAutorest(YamlUpdatePluginAutorest, PreProcessPlugin):
package/package.json CHANGED
@@ -1,44 +1,45 @@
1
1
  {
2
- "name": "@autorest/python",
3
- "version": "6.1.1",
4
- "description": "The Python extension for generators in AutoRest.",
5
- "scripts": {
6
- "prepare": "node run-python3.js prepare.py",
7
- "start": "node run-python3.js start.py",
8
- "install": "node run-python3.js install.py",
9
- "debug": "node run-python3.js start.py --debug"
10
- },
11
- "repository": {
12
- "type": "git",
13
- "url": "https://github.com/Azure/autorest.python/tree/autorestv3"
14
- },
15
- "readme": "https://github.com/Azure/autorest.python/blob/autorestv3/README.md",
16
- "keywords": [
17
- "autorest",
18
- "python"
19
- ],
20
- "author": "Microsoft Corporation",
21
- "license": "MIT",
22
- "bugs": {
23
- "url": "https://github.com/Azure/autorest.python/issues"
24
- },
25
- "homepage": "https://github.com/Azure/autorest.python/blob/autorestv3/README.md",
26
- "dependencies": {
27
- "@autorest/system-requirements": "~1.0.0"
28
- },
29
- "devDependencies": {
30
- "@microsoft.azure/autorest.testserver": "^3.3.31"
31
- },
32
- "files": [
33
- "autorest/**/*.py",
34
- "autorest/**/*.jinja2",
35
- "setup.py",
36
- "install.py",
37
- "prepare.py",
38
- "start.py",
39
- "venvtools.py",
40
- "run-python3.js",
41
- "requirements.txt",
42
- "run_cadl.py"
43
- ]
2
+ "name": "@autorest/python",
3
+ "version": "6.1.4",
4
+ "description": "The Python extension for generators in AutoRest.",
5
+ "scripts": {
6
+ "prepare": "node run-python3.js prepare.py",
7
+ "start": "node run-python3.js start.py",
8
+ "install": "node run-python3.js install.py",
9
+ "debug": "node run-python3.js start.py --debug"
10
+ },
11
+ "repository": {
12
+ "type": "git",
13
+ "url": "https://github.com/Azure/autorest.python/tree/autorestv3"
14
+ },
15
+ "readme": "https://github.com/Azure/autorest.python/blob/autorestv3/README.md",
16
+ "keywords": [
17
+ "autorest",
18
+ "python"
19
+ ],
20
+ "author": "Microsoft Corporation",
21
+ "license": "MIT",
22
+ "bugs": {
23
+ "url": "https://github.com/Azure/autorest.python/issues"
24
+ },
25
+ "homepage": "https://github.com/Azure/autorest.python/blob/autorestv3/README.md",
26
+ "dependencies": {
27
+ "@autorest/system-requirements": "~1.0.0"
28
+ },
29
+ "devDependencies": {
30
+ "@microsoft.azure/autorest.testserver": "^3.3.38",
31
+ "typescript": "^4.7.4"
32
+ },
33
+ "files": [
34
+ "autorest/**/*.py",
35
+ "autorest/**/*.jinja2",
36
+ "setup.py",
37
+ "install.py",
38
+ "prepare.py",
39
+ "start.py",
40
+ "venvtools.py",
41
+ "run-python3.js",
42
+ "requirements.txt",
43
+ "run_cadl.py"
44
+ ]
44
45
  }
package/run_cadl.py CHANGED
@@ -5,11 +5,14 @@
5
5
  # --------------------------------------------------------------------------
6
6
  import sys
7
7
  import venv
8
+ import logging
8
9
  from pathlib import Path
9
10
  from venvtools import python_run
10
11
 
11
12
  _ROOT_DIR = Path(__file__).parent
12
13
 
14
+ _LOGGER = logging.getLogger(__name__)
15
+
13
16
  if __name__ == "__main__":
14
17
  venv_path = _ROOT_DIR / "venv"
15
18
  venv_prexists = venv_path.exists()
@@ -19,8 +22,22 @@ if __name__ == "__main__":
19
22
  env_builder = venv.EnvBuilder(with_pip=True)
20
23
  venv_context = env_builder.ensure_directories(venv_path)
21
24
 
25
+ if "--debug" in sys.argv:
26
+ try:
27
+ import debugpy # pylint: disable=import-outside-toplevel
28
+ except ImportError:
29
+ raise SystemExit(
30
+ "Please pip install ptvsd in order to use VSCode debugging"
31
+ )
32
+
33
+ # 5678 is the default attach port in the VS Code debug configurations
34
+ debugpy.listen(("localhost", 5678))
35
+ debugpy.wait_for_client()
36
+ breakpoint() # pylint: disable=undefined-variable
37
+
22
38
  # run m2r
23
39
  python_run(venv_context, "autorest.m2r.__init__", command=sys.argv[1:])
24
40
  python_run(venv_context, "autorest.preprocess.__init__", command=sys.argv[1:])
41
+ python_run(venv_context, "autorest.cadlflags.__init__", command=sys.argv[1:])
25
42
  python_run(venv_context, "autorest.codegen.__init__", command=sys.argv[1:])
26
43
  python_run(venv_context, "autorest.black.__init__", command=sys.argv[1:])