@autorest/python 6.43.0 → 6.44.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. package/generator/build/lib/pygen/codegen/models/__init__.py +2 -0
  2. package/generator/build/lib/pygen/codegen/models/code_model.py +15 -0
  3. package/generator/build/lib/pygen/codegen/models/primitive_types.py +33 -0
  4. package/generator/build/lib/pygen/codegen/models/property.py +1 -0
  5. package/generator/build/lib/pygen/codegen/serializers/builder_serializer.py +1 -2
  6. package/generator/build/lib/pygen/codegen/serializers/general_serializer.py +29 -11
  7. package/generator/build/lib/pygen/codegen/serializers/model_serializer.py +3 -0
  8. package/generator/build/lib/pygen/codegen/templates/model_base.py.jinja2 +64 -1
  9. package/generator/build/lib/pygen/codegen/templates/packaging_templates/pyproject.toml.jinja2 +3 -0
  10. package/generator/build/lib/pygen/codegen/templates/packaging_templates/setup.py.jinja2 +3 -0
  11. package/generator/build/lib/pygen/codegen/templates/utils.py.jinja2 +5 -4
  12. package/generator/build/lib/pygen/preprocess/__init__.py +23 -12
  13. package/generator/dist/pygen-0.1.0-py3-none-any.whl +0 -0
  14. package/generator/pygen/codegen/models/__init__.py +2 -0
  15. package/generator/pygen/codegen/models/code_model.py +15 -0
  16. package/generator/pygen/codegen/models/primitive_types.py +33 -0
  17. package/generator/pygen/codegen/models/property.py +1 -0
  18. package/generator/pygen/codegen/serializers/builder_serializer.py +1 -2
  19. package/generator/pygen/codegen/serializers/general_serializer.py +29 -11
  20. package/generator/pygen/codegen/serializers/model_serializer.py +3 -0
  21. package/generator/pygen/codegen/templates/model_base.py.jinja2 +64 -1
  22. package/generator/pygen/codegen/templates/packaging_templates/pyproject.toml.jinja2 +3 -0
  23. package/generator/pygen/codegen/templates/packaging_templates/setup.py.jinja2 +3 -0
  24. package/generator/pygen/codegen/templates/utils.py.jinja2 +5 -4
  25. package/generator/pygen/preprocess/__init__.py +23 -12
  26. package/package.json +2 -2
  27. package/scripts/__pycache__/venvtools.cpython-310.pyc +0 -0
package/generator/build/lib/pygen/** CHANGED
The diffs for these twelve build/lib copies (files 1-12 in the list above) are identical to the diffs for the corresponding source files under package/generator/pygen/, which follow.
package/generator/pygen/codegen/models/__init__.py CHANGED
@@ -31,6 +31,7 @@ from .primitive_types import (
     SdkCoreType,
     DecimalType,
     MultiPartFileType,
+    ExternalType,
 )
 from .enum_type import EnumType, EnumValue
 from .base import BaseType
@@ -151,6 +152,7 @@ TYPE_TO_OBJECT = {
     "credential": StringType,
     "sdkcore": SdkCoreType,
     "multipartfile": MultiPartFileType,
+    "external": ExternalType,
 }
 _LOGGER = logging.getLogger(__name__)
 
package/generator/pygen/codegen/models/code_model.py CHANGED
@@ -10,6 +10,7 @@ from .base import BaseType
 from .enum_type import EnumType
 from .model_type import ModelType, UsageFlags
 from .combined_type import CombinedType
+from .primitive_types import ExternalType
 from .client import Client
 from .request_builder import RequestBuilder, OverloadedRequestBuilder
 from .operation_group import OperationGroup
@@ -101,6 +102,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
         self._operations_folder_name: dict[str, str] = {}
         self._relative_import_path: dict[str, str] = {}
         self.metadata: dict[str, Any] = yaml_data.get("metadata", {})
+        self.has_external_type = any(isinstance(t, ExternalType) for t in self.types_map.values())
 
     @staticmethod
     def get_imported_namespace_for_client(imported_namespace: str, async_mode: bool = False) -> str:
@@ -488,3 +490,16 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
     @property
     def has_operation_named_list(self) -> bool:
         return any(o.name.lower() == "list" for c in self.clients for og in c.operation_groups for o in og.operations)
+
+    @property
+    def has_padded_model_property(self) -> bool:
+        for model_type in self.model_types:
+            for prop in model_type.properties:
+                if prop.original_tsp_name:
+                    return True
+        return False
+
+    @property
+    def external_types(self) -> list[ExternalType]:
+        """All of the external types"""
+        return [t for t in self.types_map.values() if isinstance(t, ExternalType)]
package/generator/pygen/codegen/models/primitive_types.py CHANGED
@@ -615,6 +615,39 @@ class SdkCoreType(PrimitiveType):
         return self.name
 
 
+class ExternalType(PrimitiveType):
+    def __init__(self, yaml_data: dict[str, Any], code_model: "CodeModel") -> None:
+        super().__init__(yaml_data=yaml_data, code_model=code_model)
+        external_type_info = yaml_data.get("externalTypeInfo", {})
+        self.identity = external_type_info.get("identity", "")
+        self.submodule = ".".join(self.identity.split(".")[:-1])
+        self.min_version = external_type_info.get("minVersion", "")
+        self.package_name = external_type_info.get("package", "")
+
+    def docstring_type(self, **kwargs: Any) -> str:
+        return f"~{self.identity}"
+
+    def type_annotation(self, **kwargs: Any) -> str:
+        return self.identity
+
+    def imports(self, **kwargs: Any) -> FileImport:
+        file_import = super().imports(**kwargs)
+        file_import.add_import(self.submodule, ImportType.THIRDPARTY, TypingSection.REGULAR)
+        return file_import
+
+    @property
+    def instance_check_template(self) -> str:
+        return f"isinstance({{}}, {self.identity})"
+
+    def serialization_type(self, **kwargs: Any) -> str:
+        return self.identity
+
+    @property
+    def default_template_representation_declaration(self) -> str:
+        value = f"{self.identity}(...)"
+        return f'"{value}"' if self.code_model.for_test else value
+
+
 class MultiPartFileType(PrimitiveType):
     def __init__(self, yaml_data: dict[str, Any], code_model: "CodeModel") -> None:
         super().__init__(yaml_data=yaml_data, code_model=code_model)
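A rough sketch of the values `ExternalType.__init__` derives from an `externalTypeInfo` block; the payload below is invented for illustration and only mirrors the keys read above (`identity`, `minVersion`, `package`).

```python
# Hypothetical yaml_data for an external type; only the keys read by ExternalType.__init__ are shown.
yaml_data = {
    "type": "external",
    "externalTypeInfo": {
        "identity": "mylib.geometry.Point",  # fully qualified name of the external class (made up)
        "minVersion": "1.2.0",
        "package": "mylib",
    },
}

info = yaml_data["externalTypeInfo"]
identity = info.get("identity", "")               # "mylib.geometry.Point" -> used as the type annotation
submodule = ".".join(identity.split(".")[:-1])    # "mylib.geometry" -> added as a THIRDPARTY import
docstring_type = f"~{identity}"                   # "~mylib.geometry.Point" -> docstring cross-reference
instance_check = f"isinstance({{}}, {identity})"  # template for generated isinstance checks
print(submodule, docstring_type, instance_check)
```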
package/generator/pygen/codegen/models/property.py CHANGED
@@ -39,6 +39,7 @@ class Property(BaseModel): # pylint: disable=too-many-instance-attributes
         self.flattened_names: list[str] = yaml_data.get("flattenedNames", [])
         self.is_multipart_file_input: bool = yaml_data.get("isMultipartFileInput", False)
         self.flatten = self.yaml_data.get("flatten", False) and not getattr(self.type, "flattened_property", False)
+        self.original_tsp_name: Optional[str] = self.yaml_data.get("originalTspName")
 
     def pylint_disable(self) -> str:
         retval: str = ""
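The new `original_tsp_name` is picked up by the model serializer and the `rest_field` template parameter further down; a toy sketch of the keyword argument it ends up producing (the property name here is made up):

```python
# Assemble rest_field(...) arguments the way the model serializer now does for a padded property.
args = ['name="import"']            # wire name from the TSP spec (made up for this example)
original_tsp_name = "import"        # recorded by the preprocessor when a TSP reserved word is padded
if original_tsp_name:
    args.append(f'original_tsp_name="{original_tsp_name}"')
print(f"rest_field({', '.join(args)})")
# rest_field(name="import", original_tsp_name="import")
```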
package/generator/pygen/codegen/serializers/builder_serializer.py CHANGED
@@ -679,7 +679,7 @@ class _OperationSerializer(_BuilderBaseSerializer[OperationType]):
                 ")",
                 f"_file_fields: list[str] = {file_fields}",
                 f"_data_fields: list[str] = {data_fields}",
-                "_files, _data = prepare_multipart_form_data(_body, _file_fields, _data_fields)",
+                "_files = prepare_multipart_form_data(_body, _file_fields, _data_fields)",
             ]
         )
         return retval
@@ -861,7 +861,6 @@ class _OperationSerializer(_BuilderBaseSerializer[OperationType]):
                 retval.append(f" {client_name}=_{client_name},")
             elif request_builder.has_form_data_body:
                 retval.append(" files=_files,")
-                retval.append(" data=_data,")
             elif request_builder.overloads:
                 seen_body_params = set()
                 for overload in request_builder.overloads:
package/generator/pygen/codegen/serializers/general_serializer.py CHANGED
@@ -23,7 +23,7 @@ VERSION_MAP = {
     "msrest": "0.7.1",
     "isodate": "0.6.1",
     "azure-mgmt-core": "1.6.0",
-    "azure-core": "1.35.0",
+    "azure-core": "1.36.0",
     "typing-extensions": "4.6.0",
     "corehttp": "1.0.0b6",
 }
@@ -57,7 +57,16 @@ class GeneralSerializer(BaseSerializer):
         m = re.search(r"[>=]=?([\d.]+(?:[a-z]+\d+)?)", s)
         return parse_version(m.group(1)) if m else parse_version("0")
 
-    def _keep_pyproject_fields(self, file_content: str) -> dict:
+    def _update_version_map(self, version_map: dict[str, str], dep_name: str, dep: str) -> None:
+        # For tracked dependencies, check if the version is higher than our default
+        default_version = parse_version(version_map[dep_name])
+        dep_version = self._extract_min_dependency(dep)
+        # If the version is higher than the default, update VERSION_MAP
+        # with higher min dependency version
+        if dep_version > default_version:
+            version_map[dep_name] = str(dep_version)
+
+    def external_lib_version_map(self, file_content: str, additional_version_map: dict[str, str]) -> dict:
         # Load the pyproject.toml file if it exists and extract fields to keep.
         result: dict = {"KEEP_FIELDS": {}}
         try:
@@ -80,15 +89,11 @@ class GeneralSerializer(BaseSerializer):
             for dep in loaded_pyproject_toml["project"]["dependencies"]:
                 dep_name = re.split(r"[<>=\[]", dep)[0].strip()
 
-                # Check if dependency is one we track in VERSION_MAP
+                # Check if dependency is one we track in version map
                 if dep_name in VERSION_MAP:
-                    # For tracked dependencies, check if the version is higher than our default
-                    default_version = parse_version(VERSION_MAP[dep_name])
-                    dep_version = self._extract_min_dependency(dep)
-                    # If the version is higher than the default, update VERSION_MAP
-                    # with higher min dependency version
-                    if dep_version > default_version:
-                        VERSION_MAP[dep_name] = str(dep_version)
+                    self._update_version_map(VERSION_MAP, dep_name, dep)
+                elif dep_name in additional_version_map:
+                    self._update_version_map(additional_version_map, dep_name, dep)
                 else:
                     # Keep non-default dependencies
                     kept_deps.append(dep)
@@ -107,9 +112,20 @@ class GeneralSerializer(BaseSerializer):
     def serialize_package_file(self, template_name: str, file_content: str, **kwargs: Any) -> str:
         template = self.env.get_template(template_name)
 
+        additional_version_map = {}
+        if self.code_model.has_external_type:
+            for item in self.code_model.external_types:
+                if item.package_name:
+                    if item.min_version:
+                        additional_version_map[item.package_name] = item.min_version
+                    else:
+                        # Use "0" as a placeholder when min_version is not specified for external types.
+                        # This allows the dependency to be included without a specific version constraint.
+                        additional_version_map[item.package_name] = "0"
+
         # Add fields to keep from an existing pyproject.toml
         if template_name == "pyproject.toml.jinja2":
-            params = self._keep_pyproject_fields(file_content)
+            params = self.external_lib_version_map(file_content, additional_version_map)
         else:
             params = {}
 
@@ -126,6 +142,7 @@ class GeneralSerializer(BaseSerializer):
             dev_status = "4 - Beta"
         else:
             dev_status = "5 - Production/Stable"
+
         params |= {
             "code_model": self.code_model,
             "dev_status": dev_status,
@@ -136,6 +153,7 @@ class GeneralSerializer(BaseSerializer):
             "VERSION_MAP": VERSION_MAP,
             "MIN_PYTHON_VERSION": MIN_PYTHON_VERSION,
             "MAX_PYTHON_VERSION": MAX_PYTHON_VERSION,
+            "ADDITIONAL_DEPENDENCIES": [f"{item[0]}>={item[1]}" for item in additional_version_map.items()],
         }
         params |= {"options": self.code_model.options}
         params |= kwargs
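A minimal sketch of how the new `additional_version_map` turns into `ADDITIONAL_DEPENDENCIES` entries; the external types below are invented stand-ins for `code_model.external_types`.

```python
# Stand-ins for code_model.external_types (package_name/min_version mirror the attributes used above).
external_types = [
    {"package_name": "mylib", "min_version": "1.2.0"},  # external type with an explicit minVersion
    {"package_name": "otherlib", "min_version": ""},    # no minVersion -> "0" placeholder
]

additional_version_map: dict[str, str] = {}
for item in external_types:
    if item["package_name"]:
        additional_version_map[item["package_name"]] = item["min_version"] or "0"

# Same ">=" formatting as the ADDITIONAL_DEPENDENCIES template parameter.
additional_dependencies = [f"{name}>={version}" for name, version in additional_version_map.items()]
print(additional_dependencies)  # ['mylib>=1.2.0', 'otherlib>=0']
```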
package/generator/pygen/codegen/serializers/model_serializer.py CHANGED
@@ -333,6 +333,9 @@ class DpgModelSerializer(_ModelSerializer):
         if prop.xml_metadata:
             args.append(f"xml={prop.xml_metadata}")
 
+        if prop.original_tsp_name:
+            args.append(f'original_tsp_name="{prop.original_tsp_name}"')
+
         field = "rest_discriminator" if prop.is_discriminator else "rest_field"
         type_ignore = (
             " # type: ignore"
package/generator/pygen/codegen/templates/model_base.py.jinja2 CHANGED
@@ -25,6 +25,9 @@ from {{ code_model.core_library }}.exceptions import DeserializationError
 from {{ code_model.core_library }}{{ "" if code_model.is_azure_flavor else ".utils" }} import CaseInsensitiveEnumMeta
 from {{ code_model.core_library }}.{{ "" if code_model.is_azure_flavor else "runtime." }}pipeline import PipelineResponse
 from {{ code_model.core_library }}.serialization import _Null
+{% if code_model.has_external_type %}
+from {{ code_model.core_library }}.serialization import TypeHandlerRegistry
+{% endif %}
 from {{ code_model.core_library }}.rest import HttpResponse
 
 _LOGGER = logging.getLogger(__name__)
@@ -34,6 +37,10 @@ __all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
 TZ_UTC = timezone.utc
 _T = typing.TypeVar("_T")
 
+{% if code_model.has_external_type %}
+TYPE_HANDLER_REGISTRY = TypeHandlerRegistry()
+{% endif %}
+
 
 def _timedelta_as_isostr(td: timedelta) -> str:
     """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
@@ -158,6 +165,11 @@ class SdkJSONEncoder(JSONEncoder):
         except AttributeError:
             # This will be raised when it hits value.total_seconds in the method above
             pass
+        {% if code_model.has_external_type %}
+        custom_serializer = TYPE_HANDLER_REGISTRY.get_serializer(o)
+        if custom_serializer:
+            return custom_serializer(o)
+        {% endif %}
         return super(SdkJSONEncoder, self).default(o)
 
 
@@ -313,7 +325,13 @@ def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] =
         return _deserialize_int_as_str
     if rf and rf._format:
         return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
+    {% if code_model.has_external_type %}
+    if _DESERIALIZE_MAPPING.get(annotation):  # pyright: ignore
+        return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore
+    return TYPE_HANDLER_REGISTRY.get_deserializer(annotation)  # pyright: ignore
+    {% else %}
     return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore
+    {% endif %}
 
 
 def _get_type_alias_type(module_name: str, alias_name: str):
@@ -507,6 +525,14 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m
     except AttributeError:
         # This will be raised when it hits value.total_seconds in the method above
         pass
+    {% if code_model.has_external_type %}
+
+    # Check if there's a custom serializer for the type
+    custom_serializer = TYPE_HANDLER_REGISTRY.get_serializer(o)
+    if custom_serializer:
+        return custom_serializer(o)
+
+    {% endif %}
     return o
 
 
@@ -636,6 +662,12 @@ class Model(_MyMutableMapping):
             if not rf._rest_name_input:
                 rf._rest_name_input = attr
         cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
+        {% if code_model.has_padded_model_property %}
+        cls._backcompat_attr_to_rest_field: dict[str, _RestField] = {
+            Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf for attr, rf in cls
+            ._attr_to_rest_field.items()
+        }
+        {% endif %}
         cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
 
         return super().__new__(cls)
@@ -645,6 +677,18 @@ class Model(_MyMutableMapping):
         if hasattr(base, "__mapping__"):
             base.__mapping__[discriminator or cls.__name__] = cls  # type: ignore
 
+    {% if code_model.has_padded_model_property %}
+    @classmethod
+    def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str:
+        rest_field_obj = attr_to_rest_field.get(attr_name)  # pylint: disable=protected-access
+        if rest_field_obj is None:
+            return attr_name
+        original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None)  # pylint: disable=protected-access
+        if original_tsp_name:
+            return original_tsp_name
+        return attr_name
+    {% endif %}
+
     @classmethod
     def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
         for v in cls.__dict__.values():
@@ -971,6 +1015,9 @@ def _failsafe_deserialize_xml(
     return None
 
 
+{% if code_model.has_padded_model_property %}
+# pylint: disable=too-many-instance-attributes
+{% endif %}
 class _RestField:
     def __init__(
         self,
@@ -983,6 +1030,9 @@ class _RestField:
         format: typing.Optional[str] = None,
         is_multipart_file_input: bool = False,
         xml: typing.Optional[dict[str, typing.Any]] = None,
+        {% if code_model.has_padded_model_property %}
+        original_tsp_name: typing.Optional[str] = None,
+        {% endif %}
     ):
         self._type = type
         self._rest_name_input = name
@@ -994,10 +1044,17 @@ class _RestField:
         self._format = format
         self._is_multipart_file_input = is_multipart_file_input
         self._xml = xml if xml is not None else {}
+        {% if code_model.has_padded_model_property %}
+        self._original_tsp_name = original_tsp_name
+        {% endif %}
 
     @property
     def _class_type(self) -> typing.Any:
-        return getattr(self._type, "args", [None])[0]
+        result = getattr(self._type, "args", [None])[0]
+        # type may be wrapped by nested functools.partial so we need to check for that
+        if isinstance(result, functools.partial):
+            return getattr(result, "args", [None])[0]
+        return result
 
     @property
     def _rest_name(self) -> str:
@@ -1045,6 +1102,9 @@ def rest_field(
     format: typing.Optional[str] = None,
     is_multipart_file_input: bool = False,
     xml: typing.Optional[dict[str, typing.Any]] = None,
+    {% if code_model.has_padded_model_property %}
+    original_tsp_name: typing.Optional[str] = None,
+    {% endif %}
 ) -> typing.Any:
     return _RestField(
         name=name,
@@ -1054,6 +1114,9 @@ def rest_field(
         format=format,
        is_multipart_file_input=is_multipart_file_input,
        xml=xml,
+        {% if code_model.has_padded_model_property %}
+        original_tsp_name=original_tsp_name,
+        {% endif %}
     )
 
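The template only calls `get_serializer`/`get_deserializer` on the registry; `TypeHandlerRegistry` itself ships with the core library and its registration API is not shown here. The toy class below is a hypothetical stand-in that just illustrates the lookup order used in `_serialize` and `get_deserializer` above.

```python
import typing

class FakeTypeHandlerRegistry:
    """Hypothetical stand-in for the core library's TypeHandlerRegistry (lookup side only)."""

    def __init__(self) -> None:
        self._serializers: dict[type, typing.Callable[[typing.Any], typing.Any]] = {}
        self._deserializers: dict[typing.Any, typing.Callable[[typing.Any], typing.Any]] = {}

    def get_serializer(self, obj: typing.Any) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
        return self._serializers.get(type(obj))

    def get_deserializer(self, annotation: typing.Any) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
        return self._deserializers.get(annotation)

TYPE_HANDLER_REGISTRY = FakeTypeHandlerRegistry()

def _serialize(o: typing.Any) -> typing.Any:
    # Mirrors the fallback added above: built-in handling runs first (omitted here),
    # then a registered handler gets a chance before the value is returned unchanged.
    custom_serializer = TYPE_HANDLER_REGISTRY.get_serializer(o)
    if custom_serializer:
        return custom_serializer(o)
    return o

print(_serialize(42))  # 42 (no handler registered, value passes through)
```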
package/generator/pygen/codegen/templates/packaging_templates/pyproject.toml.jinja2 CHANGED
@@ -56,6 +56,9 @@ dependencies = [
     "{{ dep }}",
     {% endfor %}
     {% endif %}
+    {% for dep in ADDITIONAL_DEPENDENCIES %}
+    "{{ dep }}",
+    {% endfor %}
 ]
 dynamic = [
     {% if options.get('package-mode') %}"version", {% endif %}"readme"
package/generator/pygen/codegen/templates/packaging_templates/setup.py.jinja2 CHANGED
@@ -108,6 +108,9 @@ setup(
         "corehttp[requests]>={{ VERSION_MAP["corehttp"] }}",
         {% endif %}
         "typing-extensions>={{ VERSION_MAP['typing-extensions'] }}",
+        {% for dep in ADDITIONAL_DEPENDENCIES %}
+        {{ dep }},
+        {% endfor %}
     ],
     {% if options["package-mode"] %}
     python_requires=">={{ MIN_PYTHON_VERSION }}",
package/generator/pygen/codegen/templates/utils.py.jinja2 CHANGED
@@ -78,9 +78,8 @@ def serialize_multipart_data_entry(data_entry: Any) -> Any:
 
 def prepare_multipart_form_data(
     body: Mapping[str, Any], multipart_fields: list[str], data_fields: list[str]
-) -> tuple[list[FileType], dict[str, Any]]:
+) -> list[FileType]:
     files: list[FileType] = []
-    data: dict[str, Any] = {}
     for multipart_field in multipart_fields:
         multipart_entry = body.get(multipart_field)
         if isinstance(multipart_entry, list):
@@ -88,10 +87,12 @@ def prepare_multipart_form_data(
         elif multipart_entry:
             files.append((multipart_field, multipart_entry))
 
+    # if files is empty, sdk core library can't handle multipart/form-data correctly, so
+    # we put data fields into files with filename as None to avoid that scenario.
     for data_field in data_fields:
         data_entry = body.get(data_field)
         if data_entry:
-            data[data_field] = serialize_multipart_data_entry(data_entry)
+            files.append((data_field, str(serialize_multipart_data_entry(data_entry))))
 
-    return files, data
+    return files
 {% endif %}
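A self-contained usage sketch of the reworked helper: form-data fields now ride along in the `files` list instead of a separate `data` dict. This is a simplified re-implementation for illustration only; the real helper lives in the generated utils module, uses the core library's `FileType`, and the list-entry branch body here is assumed from context.

```python
from collections.abc import Mapping
from typing import Any

def prepare_multipart_form_data(
    body: Mapping[str, Any], multipart_fields: list[str], data_fields: list[str]
) -> list[tuple[str, Any]]:
    # FileType narrowed to a (name, value) tuple and serialize_multipart_data_entry replaced by str().
    files: list[tuple[str, Any]] = []
    for multipart_field in multipart_fields:
        multipart_entry = body.get(multipart_field)
        if isinstance(multipart_entry, list):
            # assumed behaviour for list entries: one part per element
            files.extend((multipart_field, entry) for entry in multipart_entry)
        elif multipart_entry:
            files.append((multipart_field, multipart_entry))
    # data fields are appended to files so the core library always sees a non-empty files list
    for data_field in data_fields:
        data_entry = body.get(data_field)
        if data_entry:
            files.append((data_field, str(data_entry)))
    return files

print(prepare_multipart_form_data({"image": b"\x89PNG", "name": "cat"}, ["image"], ["name"]))
# [('image', b'\x89PNG'), ('name', 'cat')]
```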
package/generator/pygen/preprocess/__init__.py CHANGED
@@ -236,7 +236,7 @@ class PreProcessPlugin(YamlUpdatePlugin):
             body_parameter["type"]["types"].insert(1, any_obj_list_or_dict)
             code_model["types"].append(body_parameter["type"])
 
-    def pad_reserved_words(self, name: str, pad_type: PadType):
+    def pad_reserved_words(self, name: str, pad_type: PadType, yaml_type: dict[str, Any]) -> str:
         # we want to pad hidden variables as well
         if not name:
             # we'll pass in empty operation groups sometime etc.
@@ -250,6 +250,10 @@ class PreProcessPlugin(YamlUpdatePlugin):
         name_prefix = "_" if name[0] == "_" else ""
         name = name[1:] if name[0] == "_" else name
         if name.lower() in reserved_words[pad_type]:
+            if self.is_tsp and name.lower() in TSP_RESERVED_WORDS.get(pad_type, []):
+                # to maintain backcompat for cases where we pad in tsp but not in autorest,
+                # if we have a tsp reserved word, we also want to keep track of the original name for backcompat
+                yaml_type["originalTspName"] = name_prefix + name
             return name_prefix + name + pad_type
         return name_prefix + name
 
@@ -257,11 +261,13 @@ class PreProcessPlugin(YamlUpdatePlugin):
         for type in yaml_data:
             for property in type.get("properties", []):
                 property["description"] = update_description(property.get("description", ""))
-                property["clientName"] = self.pad_reserved_words(property["clientName"].lower(), PadType.PROPERTY)
+                property["clientName"] = self.pad_reserved_words(
+                    property["clientName"].lower(), PadType.PROPERTY, property
+                )
                 add_redefined_builtin_info(property["clientName"], property)
             if type.get("name"):
                 pad_type = PadType.MODEL if type["type"] == "model" else PadType.ENUM_CLASS
-                name = self.pad_reserved_words(type["name"], pad_type)
+                name = self.pad_reserved_words(type["name"], pad_type, type)
                 type["name"] = name[0].upper() + name[1:]
                 type["description"] = update_description(type.get("description", ""), type["name"])
                 type["snakeCaseName"] = to_snake_case(type["name"])
@@ -269,7 +275,7 @@ class PreProcessPlugin(YamlUpdatePlugin):
                 # we're enums
                 values_to_add = []
                 for value in type["values"]:
-                    padded_name = self.pad_reserved_words(value["name"].lower(), PadType.ENUM_VALUE).upper()
+                    padded_name = self.pad_reserved_words(value["name"].lower(), PadType.ENUM_VALUE, value).upper()
                     if self.version_tolerant:
                         if padded_name[0] in "0123456789":
                             padded_name = "ENUM_" + padded_name
@@ -364,12 +370,14 @@ class PreProcessPlugin(YamlUpdatePlugin):
     def update_parameter(self, yaml_data: dict[str, Any]) -> None:
         yaml_data["description"] = update_description(yaml_data.get("description", ""))
         if not (yaml_data["location"] == "header" and yaml_data["clientName"] in ("content_type", "accept")):
-            yaml_data["clientName"] = self.pad_reserved_words(yaml_data["clientName"].lower(), PadType.PARAMETER)
+            yaml_data["clientName"] = self.pad_reserved_words(
+                yaml_data["clientName"].lower(), PadType.PARAMETER, yaml_data
+            )
         if yaml_data.get("propertyToParameterName"):
             # need to create a new one with padded keys and values
             yaml_data["propertyToParameterName"] = {
-                self.pad_reserved_words(prop, PadType.PROPERTY): self.pad_reserved_words(
-                    param_name, PadType.PARAMETER
+                self.pad_reserved_words(prop, PadType.PROPERTY, yaml_data): self.pad_reserved_words(
+                    param_name, PadType.PARAMETER, yaml_data
                 ).lower()
                 for prop, param_name in yaml_data["propertyToParameterName"].items()
             }
@@ -390,15 +398,17 @@ class PreProcessPlugin(YamlUpdatePlugin):
         *,
         is_overload: bool = False,
     ) -> None:
-        yaml_data["groupName"] = self.pad_reserved_words(yaml_data["groupName"], PadType.OPERATION_GROUP)
+        yaml_data["groupName"] = self.pad_reserved_words(yaml_data["groupName"], PadType.OPERATION_GROUP, yaml_data)
         yaml_data["groupName"] = to_snake_case(yaml_data["groupName"])
         yaml_data["name"] = yaml_data["name"].lower()
         if yaml_data.get("isLroInitialOperation") is True:
             yaml_data["name"] = (
-                "_" + self.pad_reserved_words(extract_original_name(yaml_data["name"]), PadType.METHOD) + "_initial"
+                "_"
+                + self.pad_reserved_words(extract_original_name(yaml_data["name"]), PadType.METHOD, yaml_data)
+                + "_initial"
             )
         else:
-            yaml_data["name"] = self.pad_reserved_words(yaml_data["name"], PadType.METHOD)
+            yaml_data["name"] = self.pad_reserved_words(yaml_data["name"], PadType.METHOD, yaml_data)
         yaml_data["description"] = update_description(yaml_data["description"], yaml_data["name"])
         yaml_data["summary"] = update_description(yaml_data.get("summary", ""))
         body_parameter = yaml_data.get("bodyParameter")
@@ -485,7 +495,7 @@ class PreProcessPlugin(YamlUpdatePlugin):
         item_type = item_type or yaml_data["itemType"]["elementType"]
         if yaml_data.get("nextOperation"):
            yaml_data["nextOperation"]["groupName"] = self.pad_reserved_words(
-                yaml_data["nextOperation"]["groupName"], PadType.OPERATION_GROUP
+                yaml_data["nextOperation"]["groupName"], PadType.OPERATION_GROUP, yaml_data["nextOperation"]
            )
            yaml_data["nextOperation"]["groupName"] = to_snake_case(yaml_data["nextOperation"]["groupName"])
            for response in yaml_data["nextOperation"].get("responses", []):
@@ -503,10 +513,11 @@ class PreProcessPlugin(YamlUpdatePlugin):
             operation_group["identifyName"] = self.pad_reserved_words(
                 operation_group.get("name", operation_group["propertyName"]),
                 PadType.OPERATION_GROUP,
+                operation_group,
             )
             operation_group["identifyName"] = to_snake_case(operation_group["identifyName"])
             operation_group["propertyName"] = self.pad_reserved_words(
-                operation_group["propertyName"], PadType.OPERATION_GROUP
+                operation_group["propertyName"], PadType.OPERATION_GROUP, operation_group
             )
             operation_group["propertyName"] = to_snake_case(operation_group["propertyName"])
             operation_group["className"] = update_operation_group_class_name(
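A toy illustration of the new bookkeeping in `pad_reserved_words`: when a TSP-reserved name gets padded, the original name is stashed on the YAML node as `originalTspName`. The word lists, the padding suffix, and the `is_tsp` flag handling below are invented simplifications; the real values come from `reserved_words`, `TSP_RESERVED_WORDS`, and the `PadType` value.

```python
from typing import Any

# Invented stand-ins for the preprocessor's reserved-word tables.
RESERVED_WORDS = {"property": ["import", "continue"]}
TSP_RESERVED_WORDS = {"property": ["import"]}

def pad_reserved_words(name: str, pad_type: str, yaml_type: dict[str, Any], is_tsp: bool = True) -> str:
    if name.lower() in RESERVED_WORDS[pad_type]:
        if is_tsp and name.lower() in TSP_RESERVED_WORDS.get(pad_type, []):
            # keep the unpadded name around so generated models can stay backward compatible
            yaml_type["originalTspName"] = name
        return name + "_property"  # illustrative suffix; the real one is the PadType enum value
    return name

prop = {"clientName": "import"}
prop["clientName"] = pad_reserved_words(prop["clientName"], "property", prop)
print(prop)  # {'clientName': 'import_property', 'originalTspName': 'import'}
```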
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@autorest/python",
-  "version": "6.43.0",
+  "version": "6.44.0",
   "description": "The Python extension for generators in AutoRest.",
   "main": "index.js",
   "repository": {
@@ -19,7 +19,7 @@
   },
   "homepage": "https://github.com/Azure/autorest.python/blob/main/README.md",
   "dependencies": {
-    "@typespec/http-client-python": "~0.21.0",
+    "@typespec/http-client-python": "~0.22.0",
     "@autorest/system-requirements": "~1.0.2",
     "fs-extra": "~11.2.0",
     "tsx": "~4.19.1"