@typespec/http-client-python 0.21.0 → 0.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dist/emitter/types.d.ts.map +1 -1
  2. package/dist/emitter/types.js +9 -1
  3. package/dist/emitter/types.js.map +1 -1
  4. package/emitter/src/types.ts +10 -1
  5. package/emitter/temp/tsconfig.tsbuildinfo +1 -1
  6. package/eng/scripts/ci/regenerate.ts +1 -1
  7. package/eng/scripts/setup/__pycache__/package_manager.cpython-311.pyc +0 -0
  8. package/eng/scripts/setup/__pycache__/venvtools.cpython-311.pyc +0 -0
  9. package/generator/build/lib/pygen/codegen/models/__init__.py +2 -0
  10. package/generator/build/lib/pygen/codegen/models/code_model.py +15 -0
  11. package/generator/build/lib/pygen/codegen/models/primitive_types.py +33 -0
  12. package/generator/build/lib/pygen/codegen/models/property.py +1 -0
  13. package/generator/build/lib/pygen/codegen/serializers/builder_serializer.py +1 -2
  14. package/generator/build/lib/pygen/codegen/serializers/general_serializer.py +29 -11
  15. package/generator/build/lib/pygen/codegen/serializers/model_serializer.py +3 -0
  16. package/generator/build/lib/pygen/codegen/templates/model_base.py.jinja2 +64 -1
  17. package/generator/build/lib/pygen/codegen/templates/packaging_templates/pyproject.toml.jinja2 +3 -0
  18. package/generator/build/lib/pygen/codegen/templates/packaging_templates/setup.py.jinja2 +3 -0
  19. package/generator/build/lib/pygen/codegen/templates/utils.py.jinja2 +5 -4
  20. package/generator/build/lib/pygen/preprocess/__init__.py +23 -12
  21. package/generator/dist/pygen-0.1.0-py3-none-any.whl +0 -0
  22. package/generator/pygen/codegen/models/__init__.py +2 -0
  23. package/generator/pygen/codegen/models/code_model.py +15 -0
  24. package/generator/pygen/codegen/models/primitive_types.py +33 -0
  25. package/generator/pygen/codegen/models/property.py +1 -0
  26. package/generator/pygen/codegen/serializers/builder_serializer.py +1 -2
  27. package/generator/pygen/codegen/serializers/general_serializer.py +29 -11
  28. package/generator/pygen/codegen/serializers/model_serializer.py +3 -0
  29. package/generator/pygen/codegen/templates/model_base.py.jinja2 +64 -1
  30. package/generator/pygen/codegen/templates/packaging_templates/pyproject.toml.jinja2 +3 -0
  31. package/generator/pygen/codegen/templates/packaging_templates/setup.py.jinja2 +3 -0
  32. package/generator/pygen/codegen/templates/utils.py.jinja2 +5 -4
  33. package/generator/pygen/preprocess/__init__.py +23 -12
  34. package/generator/test/azure/mock_api_tests/test_model_base_flatten_compatibility.py +251 -0
  35. package/generator/test/azure/requirements.txt +2 -0
  36. package/generator/test/unbranded/requirements.txt +1 -0
  37. package/generator/test/unittests/test_name_converter.py +1 -1
  38. package/package.json +33 -33
@@ -23,7 +23,7 @@ VERSION_MAP = {
23
23
  "msrest": "0.7.1",
24
24
  "isodate": "0.6.1",
25
25
  "azure-mgmt-core": "1.6.0",
26
- "azure-core": "1.35.0",
26
+ "azure-core": "1.36.0",
27
27
  "typing-extensions": "4.6.0",
28
28
  "corehttp": "1.0.0b6",
29
29
  }
@@ -57,7 +57,16 @@ class GeneralSerializer(BaseSerializer):
57
57
  m = re.search(r"[>=]=?([\d.]+(?:[a-z]+\d+)?)", s)
58
58
  return parse_version(m.group(1)) if m else parse_version("0")
59
59
 
60
- def _keep_pyproject_fields(self, file_content: str) -> dict:
60
+ def _update_version_map(self, version_map: dict[str, str], dep_name: str, dep: str) -> None:
61
+ # For tracked dependencies, check if the version is higher than our default
62
+ default_version = parse_version(version_map[dep_name])
63
+ dep_version = self._extract_min_dependency(dep)
64
+ # If the version is higher than the default, update VERSION_MAP
65
+ # with higher min dependency version
66
+ if dep_version > default_version:
67
+ version_map[dep_name] = str(dep_version)
68
+
69
+ def external_lib_version_map(self, file_content: str, additional_version_map: dict[str, str]) -> dict:
61
70
  # Load the pyproject.toml file if it exists and extract fields to keep.
62
71
  result: dict = {"KEEP_FIELDS": {}}
63
72
  try:
@@ -80,15 +89,11 @@ class GeneralSerializer(BaseSerializer):
80
89
  for dep in loaded_pyproject_toml["project"]["dependencies"]:
81
90
  dep_name = re.split(r"[<>=\[]", dep)[0].strip()
82
91
 
83
- # Check if dependency is one we track in VERSION_MAP
92
+ # Check if dependency is one we track in version map
84
93
  if dep_name in VERSION_MAP:
85
- # For tracked dependencies, check if the version is higher than our default
86
- default_version = parse_version(VERSION_MAP[dep_name])
87
- dep_version = self._extract_min_dependency(dep)
88
- # If the version is higher than the default, update VERSION_MAP
89
- # with higher min dependency version
90
- if dep_version > default_version:
91
- VERSION_MAP[dep_name] = str(dep_version)
94
+ self._update_version_map(VERSION_MAP, dep_name, dep)
95
+ elif dep_name in additional_version_map:
96
+ self._update_version_map(additional_version_map, dep_name, dep)
92
97
  else:
93
98
  # Keep non-default dependencies
94
99
  kept_deps.append(dep)
@@ -107,9 +112,20 @@ class GeneralSerializer(BaseSerializer):
107
112
  def serialize_package_file(self, template_name: str, file_content: str, **kwargs: Any) -> str:
108
113
  template = self.env.get_template(template_name)
109
114
 
115
+ additional_version_map = {}
116
+ if self.code_model.has_external_type:
117
+ for item in self.code_model.external_types:
118
+ if item.package_name:
119
+ if item.min_version:
120
+ additional_version_map[item.package_name] = item.min_version
121
+ else:
122
+ # Use "0" as a placeholder when min_version is not specified for external types.
123
+ # This allows the dependency to be included without a specific version constraint.
124
+ additional_version_map[item.package_name] = "0"
125
+
110
126
  # Add fields to keep from an existing pyproject.toml
111
127
  if template_name == "pyproject.toml.jinja2":
112
- params = self._keep_pyproject_fields(file_content)
128
+ params = self.external_lib_version_map(file_content, additional_version_map)
113
129
  else:
114
130
  params = {}
115
131
 
@@ -126,6 +142,7 @@ class GeneralSerializer(BaseSerializer):
126
142
  dev_status = "4 - Beta"
127
143
  else:
128
144
  dev_status = "5 - Production/Stable"
145
+
129
146
  params |= {
130
147
  "code_model": self.code_model,
131
148
  "dev_status": dev_status,
@@ -136,6 +153,7 @@ class GeneralSerializer(BaseSerializer):
136
153
  "VERSION_MAP": VERSION_MAP,
137
154
  "MIN_PYTHON_VERSION": MIN_PYTHON_VERSION,
138
155
  "MAX_PYTHON_VERSION": MAX_PYTHON_VERSION,
156
+ "ADDITIONAL_DEPENDENCIES": [f"{item[0]}>={item[1]}" for item in additional_version_map.items()],
139
157
  }
140
158
  params |= {"options": self.code_model.options}
141
159
  params |= kwargs
@@ -333,6 +333,9 @@ class DpgModelSerializer(_ModelSerializer):
333
333
  if prop.xml_metadata:
334
334
  args.append(f"xml={prop.xml_metadata}")
335
335
 
336
+ if prop.original_tsp_name:
337
+ args.append(f'original_tsp_name="{prop.original_tsp_name}"')
338
+
336
339
  field = "rest_discriminator" if prop.is_discriminator else "rest_field"
337
340
  type_ignore = (
338
341
  " # type: ignore"
@@ -25,6 +25,9 @@ from {{ code_model.core_library }}.exceptions import DeserializationError
25
25
  from {{ code_model.core_library }}{{ "" if code_model.is_azure_flavor else ".utils" }} import CaseInsensitiveEnumMeta
26
26
  from {{ code_model.core_library }}.{{ "" if code_model.is_azure_flavor else "runtime." }}pipeline import PipelineResponse
27
27
  from {{ code_model.core_library }}.serialization import _Null
28
+ {% if code_model.has_external_type %}
29
+ from {{ code_model.core_library }}.serialization import TypeHandlerRegistry
30
+ {% endif %}
28
31
  from {{ code_model.core_library }}.rest import HttpResponse
29
32
 
30
33
  _LOGGER = logging.getLogger(__name__)
@@ -34,6 +37,10 @@ __all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
34
37
  TZ_UTC = timezone.utc
35
38
  _T = typing.TypeVar("_T")
36
39
 
40
+ {% if code_model.has_external_type %}
41
+ TYPE_HANDLER_REGISTRY = TypeHandlerRegistry()
42
+ {% endif %}
43
+
37
44
 
38
45
  def _timedelta_as_isostr(td: timedelta) -> str:
39
46
  """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
@@ -158,6 +165,11 @@ class SdkJSONEncoder(JSONEncoder):
158
165
  except AttributeError:
159
166
  # This will be raised when it hits value.total_seconds in the method above
160
167
  pass
168
+ {% if code_model.has_external_type %}
169
+ custom_serializer = TYPE_HANDLER_REGISTRY.get_serializer(o)
170
+ if custom_serializer:
171
+ return custom_serializer(o)
172
+ {% endif %}
161
173
  return super(SdkJSONEncoder, self).default(o)
162
174
 
163
175
 
@@ -313,7 +325,13 @@ def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] =
313
325
  return _deserialize_int_as_str
314
326
  if rf and rf._format:
315
327
  return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
328
+ {% if code_model.has_external_type %}
329
+ if _DESERIALIZE_MAPPING.get(annotation): # pyright: ignore
330
+ return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
331
+ return TYPE_HANDLER_REGISTRY.get_deserializer(annotation) # pyright: ignore
332
+ {% else %}
316
333
  return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
334
+ {% endif %}
317
335
 
318
336
 
319
337
  def _get_type_alias_type(module_name: str, alias_name: str):
@@ -507,6 +525,14 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m
507
525
  except AttributeError:
508
526
  # This will be raised when it hits value.total_seconds in the method above
509
527
  pass
528
+ {% if code_model.has_external_type %}
529
+
530
+ # Check if there's a custom serializer for the type
531
+ custom_serializer = TYPE_HANDLER_REGISTRY.get_serializer(o)
532
+ if custom_serializer:
533
+ return custom_serializer(o)
534
+
535
+ {% endif %}
510
536
  return o
511
537
 
512
538
 
@@ -636,6 +662,12 @@ class Model(_MyMutableMapping):
636
662
  if not rf._rest_name_input:
637
663
  rf._rest_name_input = attr
638
664
  cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
665
+ {% if code_model.has_padded_model_property %}
666
+ cls._backcompat_attr_to_rest_field: dict[str, _RestField] = {
667
+ Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf for attr, rf in cls
668
+ ._attr_to_rest_field.items()
669
+ }
670
+ {% endif %}
639
671
  cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
640
672
 
641
673
  return super().__new__(cls)
@@ -645,6 +677,18 @@ class Model(_MyMutableMapping):
645
677
  if hasattr(base, "__mapping__"):
646
678
  base.__mapping__[discriminator or cls.__name__] = cls # type: ignore
647
679
 
680
+ {% if code_model.has_padded_model_property %}
681
+ @classmethod
682
+ def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str:
683
+ rest_field_obj = attr_to_rest_field.get(attr_name) # pylint: disable=protected-access
684
+ if rest_field_obj is None:
685
+ return attr_name
686
+ original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None) # pylint: disable=protected-access
687
+ if original_tsp_name:
688
+ return original_tsp_name
689
+ return attr_name
690
+ {% endif %}
691
+
648
692
  @classmethod
649
693
  def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
650
694
  for v in cls.__dict__.values():
@@ -971,6 +1015,9 @@ def _failsafe_deserialize_xml(
971
1015
  return None
972
1016
 
973
1017
 
1018
+ {% if code_model.has_padded_model_property %}
1019
+ # pylint: disable=too-many-instance-attributes
1020
+ {% endif %}
974
1021
  class _RestField:
975
1022
  def __init__(
976
1023
  self,
@@ -983,6 +1030,9 @@ class _RestField:
983
1030
  format: typing.Optional[str] = None,
984
1031
  is_multipart_file_input: bool = False,
985
1032
  xml: typing.Optional[dict[str, typing.Any]] = None,
1033
+ {% if code_model.has_padded_model_property %}
1034
+ original_tsp_name: typing.Optional[str] = None,
1035
+ {% endif %}
986
1036
  ):
987
1037
  self._type = type
988
1038
  self._rest_name_input = name
@@ -994,10 +1044,17 @@ class _RestField:
994
1044
  self._format = format
995
1045
  self._is_multipart_file_input = is_multipart_file_input
996
1046
  self._xml = xml if xml is not None else {}
1047
+ {% if code_model.has_padded_model_property %}
1048
+ self._original_tsp_name = original_tsp_name
1049
+ {% endif %}
997
1050
 
998
1051
  @property
999
1052
  def _class_type(self) -> typing.Any:
1000
- return getattr(self._type, "args", [None])[0]
1053
+ result = getattr(self._type, "args", [None])[0]
1054
+ # type may be wrapped by nested functools.partial so we need to check for that
1055
+ if isinstance(result, functools.partial):
1056
+ return getattr(result, "args", [None])[0]
1057
+ return result
1001
1058
 
1002
1059
  @property
1003
1060
  def _rest_name(self) -> str:
@@ -1045,6 +1102,9 @@ def rest_field(
1045
1102
  format: typing.Optional[str] = None,
1046
1103
  is_multipart_file_input: bool = False,
1047
1104
  xml: typing.Optional[dict[str, typing.Any]] = None,
1105
+ {% if code_model.has_padded_model_property %}
1106
+ original_tsp_name: typing.Optional[str] = None,
1107
+ {% endif %}
1048
1108
  ) -> typing.Any:
1049
1109
  return _RestField(
1050
1110
  name=name,
@@ -1054,6 +1114,9 @@ def rest_field(
1054
1114
  format=format,
1055
1115
  is_multipart_file_input=is_multipart_file_input,
1056
1116
  xml=xml,
1117
+ {% if code_model.has_padded_model_property %}
1118
+ original_tsp_name=original_tsp_name,
1119
+ {% endif %}
1057
1120
  )
1058
1121
 
1059
1122
 
@@ -56,6 +56,9 @@ dependencies = [
56
56
  "{{ dep }}",
57
57
  {% endfor %}
58
58
  {% endif %}
59
+ {% for dep in ADDITIONAL_DEPENDENCIES %}
60
+ "{{ dep }}",
61
+ {% endfor %}
59
62
  ]
60
63
  dynamic = [
61
64
  {% if options.get('package-mode') %}"version", {% endif %}"readme"
@@ -108,6 +108,9 @@ setup(
108
108
  "corehttp[requests]>={{ VERSION_MAP["corehttp"] }}",
109
109
  {% endif %}
110
110
  "typing-extensions>={{ VERSION_MAP['typing-extensions'] }}",
111
+ {% for dep in ADDITIONAL_DEPENDENCIES %}
112
+ {{ dep }},
113
+ {% endfor %}
111
114
  ],
112
115
  {% if options["package-mode"] %}
113
116
  python_requires=">={{ MIN_PYTHON_VERSION }}",
@@ -78,9 +78,8 @@ def serialize_multipart_data_entry(data_entry: Any) -> Any:
78
78
 
79
79
  def prepare_multipart_form_data(
80
80
  body: Mapping[str, Any], multipart_fields: list[str], data_fields: list[str]
81
- ) -> tuple[list[FileType], dict[str, Any]]:
81
+ ) -> list[FileType]:
82
82
  files: list[FileType] = []
83
- data: dict[str, Any] = {}
84
83
  for multipart_field in multipart_fields:
85
84
  multipart_entry = body.get(multipart_field)
86
85
  if isinstance(multipart_entry, list):
@@ -88,10 +87,12 @@ def prepare_multipart_form_data(
88
87
  elif multipart_entry:
89
88
  files.append((multipart_field, multipart_entry))
90
89
 
90
+ # if files is empty, sdk core library can't handle multipart/form-data correctly, so
91
+ # we put data fields into files with filename as None to avoid that scenario.
91
92
  for data_field in data_fields:
92
93
  data_entry = body.get(data_field)
93
94
  if data_entry:
94
- data[data_field] = serialize_multipart_data_entry(data_entry)
95
+ files.append((data_field, str(serialize_multipart_data_entry(data_entry))))
95
96
 
96
- return files, data
97
+ return files
97
98
  {% endif %}
@@ -236,7 +236,7 @@ class PreProcessPlugin(YamlUpdatePlugin):
236
236
  body_parameter["type"]["types"].insert(1, any_obj_list_or_dict)
237
237
  code_model["types"].append(body_parameter["type"])
238
238
 
239
- def pad_reserved_words(self, name: str, pad_type: PadType):
239
+ def pad_reserved_words(self, name: str, pad_type: PadType, yaml_type: dict[str, Any]) -> str:
240
240
  # we want to pad hidden variables as well
241
241
  if not name:
242
242
  # we'll pass in empty operation groups sometime etc.
@@ -250,6 +250,10 @@ class PreProcessPlugin(YamlUpdatePlugin):
250
250
  name_prefix = "_" if name[0] == "_" else ""
251
251
  name = name[1:] if name[0] == "_" else name
252
252
  if name.lower() in reserved_words[pad_type]:
253
+ if self.is_tsp and name.lower() in TSP_RESERVED_WORDS.get(pad_type, []):
254
+ # to maintain backcompat for cases where we pad in tsp but not in autorest,
255
+ # if we have a tsp reserved word, we also want to keep track of the original name for backcompat
256
+ yaml_type["originalTspName"] = name_prefix + name
253
257
  return name_prefix + name + pad_type
254
258
  return name_prefix + name
255
259
 
@@ -257,11 +261,13 @@ class PreProcessPlugin(YamlUpdatePlugin):
257
261
  for type in yaml_data:
258
262
  for property in type.get("properties", []):
259
263
  property["description"] = update_description(property.get("description", ""))
260
- property["clientName"] = self.pad_reserved_words(property["clientName"].lower(), PadType.PROPERTY)
264
+ property["clientName"] = self.pad_reserved_words(
265
+ property["clientName"].lower(), PadType.PROPERTY, property
266
+ )
261
267
  add_redefined_builtin_info(property["clientName"], property)
262
268
  if type.get("name"):
263
269
  pad_type = PadType.MODEL if type["type"] == "model" else PadType.ENUM_CLASS
264
- name = self.pad_reserved_words(type["name"], pad_type)
270
+ name = self.pad_reserved_words(type["name"], pad_type, type)
265
271
  type["name"] = name[0].upper() + name[1:]
266
272
  type["description"] = update_description(type.get("description", ""), type["name"])
267
273
  type["snakeCaseName"] = to_snake_case(type["name"])
@@ -269,7 +275,7 @@ class PreProcessPlugin(YamlUpdatePlugin):
269
275
  # we're enums
270
276
  values_to_add = []
271
277
  for value in type["values"]:
272
- padded_name = self.pad_reserved_words(value["name"].lower(), PadType.ENUM_VALUE).upper()
278
+ padded_name = self.pad_reserved_words(value["name"].lower(), PadType.ENUM_VALUE, value).upper()
273
279
  if self.version_tolerant:
274
280
  if padded_name[0] in "0123456789":
275
281
  padded_name = "ENUM_" + padded_name
@@ -364,12 +370,14 @@ class PreProcessPlugin(YamlUpdatePlugin):
364
370
  def update_parameter(self, yaml_data: dict[str, Any]) -> None:
365
371
  yaml_data["description"] = update_description(yaml_data.get("description", ""))
366
372
  if not (yaml_data["location"] == "header" and yaml_data["clientName"] in ("content_type", "accept")):
367
- yaml_data["clientName"] = self.pad_reserved_words(yaml_data["clientName"].lower(), PadType.PARAMETER)
373
+ yaml_data["clientName"] = self.pad_reserved_words(
374
+ yaml_data["clientName"].lower(), PadType.PARAMETER, yaml_data
375
+ )
368
376
  if yaml_data.get("propertyToParameterName"):
369
377
  # need to create a new one with padded keys and values
370
378
  yaml_data["propertyToParameterName"] = {
371
- self.pad_reserved_words(prop, PadType.PROPERTY): self.pad_reserved_words(
372
- param_name, PadType.PARAMETER
379
+ self.pad_reserved_words(prop, PadType.PROPERTY, yaml_data): self.pad_reserved_words(
380
+ param_name, PadType.PARAMETER, yaml_data
373
381
  ).lower()
374
382
  for prop, param_name in yaml_data["propertyToParameterName"].items()
375
383
  }
@@ -390,15 +398,17 @@ class PreProcessPlugin(YamlUpdatePlugin):
390
398
  *,
391
399
  is_overload: bool = False,
392
400
  ) -> None:
393
- yaml_data["groupName"] = self.pad_reserved_words(yaml_data["groupName"], PadType.OPERATION_GROUP)
401
+ yaml_data["groupName"] = self.pad_reserved_words(yaml_data["groupName"], PadType.OPERATION_GROUP, yaml_data)
394
402
  yaml_data["groupName"] = to_snake_case(yaml_data["groupName"])
395
403
  yaml_data["name"] = yaml_data["name"].lower()
396
404
  if yaml_data.get("isLroInitialOperation") is True:
397
405
  yaml_data["name"] = (
398
- "_" + self.pad_reserved_words(extract_original_name(yaml_data["name"]), PadType.METHOD) + "_initial"
406
+ "_"
407
+ + self.pad_reserved_words(extract_original_name(yaml_data["name"]), PadType.METHOD, yaml_data)
408
+ + "_initial"
399
409
  )
400
410
  else:
401
- yaml_data["name"] = self.pad_reserved_words(yaml_data["name"], PadType.METHOD)
411
+ yaml_data["name"] = self.pad_reserved_words(yaml_data["name"], PadType.METHOD, yaml_data)
402
412
  yaml_data["description"] = update_description(yaml_data["description"], yaml_data["name"])
403
413
  yaml_data["summary"] = update_description(yaml_data.get("summary", ""))
404
414
  body_parameter = yaml_data.get("bodyParameter")
@@ -485,7 +495,7 @@ class PreProcessPlugin(YamlUpdatePlugin):
485
495
  item_type = item_type or yaml_data["itemType"]["elementType"]
486
496
  if yaml_data.get("nextOperation"):
487
497
  yaml_data["nextOperation"]["groupName"] = self.pad_reserved_words(
488
- yaml_data["nextOperation"]["groupName"], PadType.OPERATION_GROUP
498
+ yaml_data["nextOperation"]["groupName"], PadType.OPERATION_GROUP, yaml_data["nextOperation"]
489
499
  )
490
500
  yaml_data["nextOperation"]["groupName"] = to_snake_case(yaml_data["nextOperation"]["groupName"])
491
501
  for response in yaml_data["nextOperation"].get("responses", []):
@@ -503,10 +513,11 @@ class PreProcessPlugin(YamlUpdatePlugin):
503
513
  operation_group["identifyName"] = self.pad_reserved_words(
504
514
  operation_group.get("name", operation_group["propertyName"]),
505
515
  PadType.OPERATION_GROUP,
516
+ operation_group,
506
517
  )
507
518
  operation_group["identifyName"] = to_snake_case(operation_group["identifyName"])
508
519
  operation_group["propertyName"] = self.pad_reserved_words(
509
- operation_group["propertyName"], PadType.OPERATION_GROUP
520
+ operation_group["propertyName"], PadType.OPERATION_GROUP, operation_group
510
521
  )
511
522
  operation_group["propertyName"] = to_snake_case(operation_group["propertyName"])
512
523
  operation_group["className"] = update_operation_group_class_name(
@@ -0,0 +1,251 @@
1
+ # ------------------------------------
2
+ # Copyright (c) Microsoft Corporation.
3
+ # Licensed under the MIT License.
4
+ # ------------------------------------
5
+ import datetime
6
+ from typing import (
7
+ Any,
8
+ Mapping,
9
+ Optional,
10
+ overload,
11
+ )
12
+
13
+ from specs.azure.clientgenerator.core.flattenproperty._utils.model_base import (
14
+ Model,
15
+ rest_field,
16
+ )
17
+ from azure.core.serialization import attribute_list
18
+
19
+
20
+ class ModelProperty(Model):
21
+ """This is a test model."""
22
+
23
+ value: str = rest_field()
24
+ """Required."""
25
+
26
+ @overload
27
+ def __init__(
28
+ self,
29
+ *,
30
+ value: str,
31
+ ) -> None: ...
32
+
33
+ @overload
34
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
35
+ """
36
+ :param mapping: raw JSON to initialize the model.
37
+ :type mapping: Mapping[str, Any]
38
+ """
39
+
40
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
41
+ super().__init__(*args, **kwargs)
42
+
43
+
44
+ class ChildModel(Model):
45
+ """This is the child model to be flattened.
46
+
47
+ :ivar description: Required.
48
+ :vartype description: str
49
+ :ivar age: Required.
50
+ :vartype age: int
51
+ """
52
+
53
+ description: str = rest_field()
54
+ """Required."""
55
+ age: int = rest_field()
56
+ """Required."""
57
+ model_property: "ModelProperty" = rest_field(name="modelProperty")
58
+ """Required."""
59
+ datetime_default: datetime.datetime = rest_field(name="datetimeDefault")
60
+ datetime_rfc3339: datetime.datetime = rest_field(name="datetimeRfc3339", format="rfc3339")
61
+ datetime_rfc7231: datetime.datetime = rest_field(name="datetimeRfc7231", format="rfc7231")
62
+ datetime_unix_timestamp: datetime.datetime = rest_field(name="datetimeUnixTimestamp", format="unix-timestamp")
63
+
64
+ @overload
65
+ def __init__(
66
+ self,
67
+ *,
68
+ description: str,
69
+ age: int,
70
+ model_property: "ModelProperty",
71
+ datetime_default: datetime.datetime,
72
+ datetime_rfc3339: datetime.datetime,
73
+ datetime_rfc7231: datetime.datetime,
74
+ datetime_unix_timestamp: datetime.datetime,
75
+ ) -> None: ...
76
+
77
+ @overload
78
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
79
+ """
80
+ :param mapping: raw JSON to initialize the model.
81
+ :type mapping: Mapping[str, Any]
82
+ """
83
+
84
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
85
+ super().__init__(*args, **kwargs)
86
+
87
+
88
+ class FlattenModel(Model):
89
+ """This is the model with one level of flattening."""
90
+
91
+ name: str = rest_field()
92
+ """Required."""
93
+ properties: "ChildModel" = rest_field()
94
+ """Required."""
95
+
96
+ __flattened_items = [
97
+ "description",
98
+ "age",
99
+ "model_property",
100
+ "datetime_default",
101
+ "datetime_rfc3339",
102
+ "datetime_rfc7231",
103
+ "datetime_unix_timestamp",
104
+ ]
105
+
106
+ @overload
107
+ def __init__(
108
+ self,
109
+ *,
110
+ name: str,
111
+ properties: "ChildModel",
112
+ ) -> None: ...
113
+
114
+ @overload
115
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
116
+ """
117
+ :param mapping: raw JSON to initialize the model.
118
+ :type mapping: Mapping[str, Any]
119
+ """
120
+
121
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
122
+ _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
123
+ super().__init__(*args, **kwargs)
124
+ for k, v in _flattened_input.items():
125
+ setattr(self, k, v)
126
+
127
+ def __getattr__(self, name: str) -> Any:
128
+ if name in self.__flattened_items:
129
+ if self.properties is None:
130
+ return None
131
+ return getattr(self.properties, name)
132
+ raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
133
+
134
+ def __setattr__(self, key: str, value: Any) -> None:
135
+ if key in self.__flattened_items:
136
+ if self.properties is None:
137
+ self.properties = self._attr_to_rest_field["properties"]._class_type()
138
+ setattr(self.properties, key, value)
139
+ else:
140
+ super().__setattr__(key, value)
141
+
142
+
143
+ def test_model_initialization():
144
+ model = FlattenModel(
145
+ name="test",
146
+ description="a description",
147
+ age=30,
148
+ model_property=ModelProperty(value="test value"),
149
+ datetime_default=datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc),
150
+ datetime_rfc3339=datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc),
151
+ datetime_rfc7231=datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc),
152
+ datetime_unix_timestamp=datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc),
153
+ )
154
+
155
+ assert model.name == "test"
156
+
157
+ assert model.description == "a description"
158
+ assert model.properties.description == "a description"
159
+
160
+ assert model.age == 30
161
+ assert model.properties.age == 30
162
+
163
+ assert model.model_property.value == "test value"
164
+ assert model.properties.model_property == ModelProperty(value="test value")
165
+ assert model.properties.model_property.value == "test value"
166
+
167
+ assert model.datetime_default == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
168
+ assert model.properties.datetime_default == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
169
+ assert model.properties["datetimeDefault"] == "2023-01-12T00:00:00Z"
170
+
171
+ assert model.datetime_rfc3339 == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
172
+ assert model.properties.datetime_rfc3339 == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
173
+ assert model.properties["datetimeRfc3339"] == "2023-01-12T00:00:00Z"
174
+
175
+ assert model.datetime_rfc7231 == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
176
+ assert model.properties.datetime_rfc7231 == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
177
+ assert model.properties["datetimeRfc7231"] == "Thu, 12 Jan 2023 00:00:00 GMT"
178
+
179
+ assert model.datetime_unix_timestamp == datetime.datetime(2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc)
180
+ assert model.properties.datetime_unix_timestamp == datetime.datetime(
181
+ 2023, 1, 12, 0, 0, 0, tzinfo=datetime.timezone.utc
182
+ )
183
+ assert model.properties["datetimeUnixTimestamp"] == 1673481600
184
+
185
+
186
+ class FlattenModelWithOptionalProperties(Model):
187
+ """This is the model with one level of flattening and optional properties."""
188
+
189
+ name: str = rest_field()
190
+ """Required."""
191
+ properties: Optional["ModelProperty"] = rest_field()
192
+ """Optional."""
193
+
194
+ __flattened_items = ["value"]
195
+
196
+ @overload
197
+ def __init__(
198
+ self,
199
+ *,
200
+ name: str,
201
+ properties: Optional["ModelProperty"],
202
+ ) -> None: ...
203
+
204
+ @overload
205
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
206
+ """
207
+ :param mapping: raw JSON to initialize the model.
208
+ :type mapping: Mapping[str, Any]
209
+ """
210
+
211
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
212
+ _flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}
213
+ super().__init__(*args, **kwargs)
214
+ for k, v in _flattened_input.items():
215
+ setattr(self, k, v)
216
+
217
+ def __getattr__(self, name: str) -> Any:
218
+ if name in self.__flattened_items:
219
+ if self.properties is None:
220
+ return None
221
+ return getattr(self.properties, name)
222
+ raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
223
+
224
+ def __setattr__(self, key: str, value: Any) -> None:
225
+ if key in self.__flattened_items:
226
+ if self.properties is None:
227
+ self.properties = self._attr_to_rest_field["properties"]._class_type()
228
+ setattr(self.properties, key, value)
229
+ else:
230
+ super().__setattr__(key, value)
231
+
232
+
233
+ def test_model_with_optional_properties_initialization():
234
+ model = FlattenModelWithOptionalProperties(
235
+ name="test",
236
+ value="test value",
237
+ )
238
+
239
+ assert model.name == "test"
240
+
241
+ assert model.value == "test value"
242
+ assert model.properties.value == "test value"
243
+
244
+
245
+ def test_model_with_optional_properties_attribute_list():
246
+ model = FlattenModelWithOptionalProperties(
247
+ name="test",
248
+ )
249
+
250
+ attrs = attribute_list(model)
251
+ assert sorted(attrs) == sorted(["name", "value"])
@@ -14,6 +14,7 @@ azure-mgmt-core==1.6.0
14
14
  -e ./generated/azure-client-generator-core-usage
15
15
  -e ./generated/azure-client-generator-core-override
16
16
  -e ./generated/azure-client-generator-core-client-location
17
+ -e ./generated/azure-client-generator-core-alternate-type
17
18
  -e ./generated/azure-client-generator-core-next-link-verb
18
19
  -e ./generated/azure-core-basic
19
20
  -e ./generated/azure-core-scalar
@@ -53,6 +54,7 @@ azure-mgmt-core==1.6.0
53
54
  -e ./generated/setuppy-authentication-union
54
55
  -e ./generated/encode-duration
55
56
  -e ./generated/encode-numeric
57
+ -e ./generated/encode-array
56
58
  -e ./generated/parameters-basic
57
59
  -e ./generated/parameters-collection-format
58
60
  -e ./generated/parameters-path
@@ -9,6 +9,7 @@
9
9
  -e ./generated/setuppy-authentication-union
10
10
  -e ./generated/encode-duration
11
11
  -e ./generated/encode-numeric
12
+ -e ./generated/encode-array
12
13
  -e ./generated/parameters-basic
13
14
  -e ./generated/parameters-collection-format
14
15
  -e ./generated/parameters-path