atlas-init 0.6.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/atlas_init.yaml +1 -0
  3. atlas_init/cli_args.py +19 -1
  4. atlas_init/cli_tf/ci_tests.py +116 -24
  5. atlas_init/cli_tf/example_update.py +20 -8
  6. atlas_init/cli_tf/go_test_run.py +14 -2
  7. atlas_init/cli_tf/go_test_summary.py +334 -82
  8. atlas_init/cli_tf/go_test_tf_error.py +20 -12
  9. atlas_init/cli_tf/hcl/modifier.py +22 -8
  10. atlas_init/cli_tf/hcl/modifier2.py +120 -0
  11. atlas_init/cli_tf/openapi.py +10 -6
  12. atlas_init/html_out/__init__.py +0 -0
  13. atlas_init/html_out/md_export.py +143 -0
  14. atlas_init/sdk_ext/__init__.py +0 -0
  15. atlas_init/sdk_ext/go.py +102 -0
  16. atlas_init/sdk_ext/typer_app.py +18 -0
  17. atlas_init/settings/env_vars.py +25 -3
  18. atlas_init/settings/env_vars_generated.py +2 -0
  19. atlas_init/tf/.terraform.lock.hcl +33 -33
  20. atlas_init/tf/modules/aws_s3/provider.tf +1 -1
  21. atlas_init/tf/modules/aws_vpc/provider.tf +1 -1
  22. atlas_init/tf/modules/cloud_provider/provider.tf +1 -1
  23. atlas_init/tf/modules/cluster/provider.tf +1 -1
  24. atlas_init/tf/modules/encryption_at_rest/provider.tf +1 -1
  25. atlas_init/tf/modules/federated_vars/federated_vars.tf +1 -2
  26. atlas_init/tf/modules/federated_vars/provider.tf +1 -1
  27. atlas_init/tf/modules/project_extra/provider.tf +1 -1
  28. atlas_init/tf/modules/stream_instance/provider.tf +1 -1
  29. atlas_init/tf/modules/vpc_peering/provider.tf +1 -1
  30. atlas_init/tf/modules/vpc_privatelink/versions.tf +1 -1
  31. atlas_init/tf/providers.tf +1 -1
  32. atlas_init/tf_ext/__init__.py +0 -0
  33. atlas_init/tf_ext/__main__.py +3 -0
  34. atlas_init/tf_ext/api_call.py +325 -0
  35. atlas_init/tf_ext/args.py +32 -0
  36. atlas_init/tf_ext/constants.py +3 -0
  37. atlas_init/tf_ext/gen_examples.py +141 -0
  38. atlas_init/tf_ext/gen_module_readme.py +131 -0
  39. atlas_init/tf_ext/gen_resource_main.py +195 -0
  40. atlas_init/tf_ext/gen_resource_output.py +71 -0
  41. atlas_init/tf_ext/gen_resource_variables.py +159 -0
  42. atlas_init/tf_ext/gen_versions.py +10 -0
  43. atlas_init/tf_ext/models.py +106 -0
  44. atlas_init/tf_ext/models_module.py +454 -0
  45. atlas_init/tf_ext/newres.py +90 -0
  46. atlas_init/tf_ext/paths.py +126 -0
  47. atlas_init/tf_ext/plan_diffs.py +140 -0
  48. atlas_init/tf_ext/provider_schema.py +199 -0
  49. atlas_init/tf_ext/py_gen.py +294 -0
  50. atlas_init/tf_ext/schema_to_dataclass.py +522 -0
  51. atlas_init/tf_ext/settings.py +188 -0
  52. atlas_init/tf_ext/tf_dep.py +324 -0
  53. atlas_init/tf_ext/tf_desc_gen.py +53 -0
  54. atlas_init/tf_ext/tf_desc_update.py +0 -0
  55. atlas_init/tf_ext/tf_mod_gen.py +263 -0
  56. atlas_init/tf_ext/tf_mod_gen_provider.py +124 -0
  57. atlas_init/tf_ext/tf_modules.py +395 -0
  58. atlas_init/tf_ext/tf_vars.py +158 -0
  59. atlas_init/tf_ext/typer_app.py +28 -0
  60. {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/METADATA +5 -3
  61. {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/RECORD +64 -31
  62. atlas_init-0.8.0.dist-info/entry_points.txt +5 -0
  63. atlas_init-0.6.0.dist-info/entry_points.txt +0 -2
  64. {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/WHEEL +0 -0
  65. {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,522 @@
1
+ from __future__ import annotations
2
+
3
+ import keyword
4
+ import logging
5
+ import re
6
+ from collections import defaultdict
7
+ from dataclasses import fields
8
+ from pathlib import Path
9
+ from tempfile import TemporaryDirectory
10
+ from typing import Any, Callable, ClassVar, Self
11
+
12
+ from ask_shell import ShellError, run_and_wait
13
+ from inflection import singularize
14
+ from model_lib import Entity
15
+ from pydantic import model_validator
16
+ from zero_3rdparty import humps
17
+ from zero_3rdparty.file_utils import copy, update_between_markers
18
+
19
+ from atlas_init.tf_ext.models_module import (
20
+ ModuleGenConfig,
21
+ ResourceAbs,
22
+ ResourceTypePythonModule,
23
+ import_resource_type_python_module,
24
+ )
25
+ from atlas_init.tf_ext.provider_schema import ResourceSchema, SchemaAttribute, SchemaBlock
26
+ from atlas_init.tf_ext.py_gen import (
27
+ as_set,
28
+ dataclass_matches,
29
+ ensure_dataclass_use_conversion,
30
+ import_from_path,
31
+ longest_common_substring_among_all,
32
+ make_post_init_line_optional,
33
+ module_dataclasses,
34
+ move_main_call_to_end,
35
+ primitive_types,
36
+ )
37
+
38
logger = logging.getLogger(__name__)

# Markers that delimit the generated section inside a resource-type python module.
# `convert_and_format` replaces everything between them on regeneration, so any
# hand-written code outside the markers survives a re-run.
MARKER_START = "# codegen atlas-init-marker-start"
MARKER_END = "# codegen atlas-init-marker-end"
42
+
43
+
44
def is_computed_only(attr: SchemaAttribute) -> bool:
    """Return True when the schema marks *attr* as computed but neither required nor optional."""
    if not attr.computed:
        return False
    return not (attr.required or attr.optional)
46
+
47
+
48
def type_from_schema_attr(attr: SchemaAttribute, parent_class_name=None, attr_name=None) -> str:
    """Map a Terraform schema attribute ``type`` to a Python annotation string.

    Handles only the plain ``type`` field (never ``nested_type``). Unknown or
    unsupported forms fall back to ``"Any"``. The extra parameters are accepted
    for call-site symmetry but are currently unused.
    """
    primitives = {
        "string": "str",
        "number": "float",
        "bool": "bool",
        "int": "int",
        "any": "Any",
    }
    tf_type = attr.type
    if isinstance(tf_type, str):
        return primitives.get(tf_type, "Any")
    if isinstance(tf_type, dict):
        return "dict"
    if isinstance(tf_type, list):
        # Terraform composite types look like ["list", "string"] or ["set", "object", {...}]
        kind = tf_type[0]
        if kind == "map":
            return "Dict[str, Any]"
        if kind in ("list", "set"):
            if len(tf_type) == 2 and isinstance(tf_type[1], str):
                return f"List[{type_from_schema_attr(SchemaAttribute(type=tf_type[1]))}]"
            if len(tf_type) == 3 and isinstance(tf_type[2], dict):
                # element is an object type
                return "List[dict]"
    return "Any"
72
+
73
+
74
def safe_name(name):
    """Append a trailing underscore when *name* collides with a Python keyword."""
    if keyword.iskeyword(name):
        return f"{name}_"
    return name
76
+
77
+
78
def py_type_from_element_type(elem_type_val: str | dict[str, str] | Any) -> str:
    """Translate a Terraform ``element_type`` value into a Python type name string."""
    if isinstance(elem_type_val, dict):
        # object element types are rendered as plain dicts
        return "dict"
    if not isinstance(elem_type_val, str):
        return "Any"
    mapping = {
        "string": "str",
        "number": "float",
        "bool": "bool",
        "int": "int",
        "any": "Any",
    }
    return mapping.get(elem_type_val, "Any")
91
+
92
+
93
class DcField(Entity):
    """One field of a generated dataclass, rendered as a source-code line.

    Built by ``convert_to_dataclass`` from a Terraform schema attribute or block;
    the ``declare``/``declare_required`` properties emit the literal dataclass
    field line (including the ``field(metadata=...)`` call when description or
    HCL default metadata is present).
    """

    NO_DEFAULT: ClassVar[str] = "None"
    METADATA_DEFAULT_NAME: ClassVar[str] = "default_hcl"
    name: str  # attribute name; keywords are suffixed with "_" by the validator
    type_annotation: str  # python annotation string, e.g. "List[Spec]"
    description: str | None = None
    default_value: str = NO_DEFAULT
    default_hcl_string: str | None = None
    nested_class_name: str = ""  # set when the field's element is a generated nested class
    required: bool = False
    optional: bool = False
    computed: bool = False

    @model_validator(mode="after")
    def validate_self(self) -> Self:
        # Rewrite keyword-colliding names (e.g. "class" -> "class_") after validation.
        self.name = safe_name(self.name)
        return self

    @property
    def is_list(self) -> bool:
        return self.type_annotation.startswith("List[")

    @property
    def is_dict(self) -> bool:
        return self.type_annotation.startswith("Dict[")

    @property
    def is_nested(self) -> bool:
        # Nested: any container annotation or an explicit nested class name.
        return self.type_annotation.startswith(("List[", "Dict[", "Set[")) or self.nested_class_name != ""

    @property
    def metadata(self) -> dict:
        # Only truthy entries make it into the generated field(metadata=...) dict.
        return {
            key: value
            for key, value in [
                ("description", self.description),
                (self.METADATA_DEFAULT_NAME, self.default_hcl_string),
            ]
            if value
        }

    @property
    def declare(self) -> str:
        # Optional form: every non-required field defaults to None.
        if metadata := self.metadata:
            field_args = ["default=None", f"metadata={metadata}"]
            return f"    {self.name}: Optional[{self.type_annotation}] = field({', '.join(field_args)})"
        return f"    {self.name}: Optional[{self.type_annotation}] = None"

    @property
    def declare_required(self) -> str:
        """Why not use self.required? Even though an attribute is required in the schema, we might be able to infer the value, for example cluster_type"""
        if metadata := self.metadata:
            field_args = [f"metadata={metadata}"]
            return f"    {self.name}: {self.type_annotation} = field({', '.join(field_args)})"
        return f"    {self.name}: {self.type_annotation}"

    @property
    def post_init(self) -> str:
        # Nested-class fields need a __post_init__ line converting dicts to dataclasses.
        if cls_name := self.nested_class_name:
            return make_post_init_line_optional(self.name, cls_name, is_list=self.is_list, is_map=self.is_dict)
        return ""

    @property
    def computed_only(self) -> bool:
        # Mirrors module-level is_computed_only, but on the already-normalized bools.
        return self.computed and not self.required and not self.optional
159
+
160
def nested_type_annotation(elem_cls_name: str, nesting_mode: str | None) -> str:
    """Wrap *elem_cls_name* in ``List[...]``/``Set[...]`` based on the block's nesting mode."""
    wrappers = {"list": "List", "set": "Set"}
    wrapper = wrappers.get(nesting_mode or "")
    return f"{wrapper}[{elem_cls_name}]" if wrapper else elem_cls_name
167
+
168
+
169
def convert_to_dataclass(
    schema: ResourceSchema, existing: ResourceTypePythonModule, config: ModuleGenConfig, resource_type: str
) -> str:
    """Render a Terraform resource schema as Python dataclass source code.

    Walks ``schema.block`` recursively: every nested block/attribute-object
    becomes its own ``@dataclass`` (named ``Parent_Child``), collected in
    ``class_defs``. The returned string is the full module text (imports plus
    all class definitions), still unformatted — callers run ruff on it later.
    """
    class_defs = []

    def block_to_class(block: SchemaBlock, class_name: str, extra_post_init: list[str] | None = None) -> str:
        # Render one schema block as a single @dataclass definition; recurses
        # for nested blocks (their classes are appended to class_defs).
        lines = ["@dataclass", f"class {class_name}:"]
        dc_fields: list[DcField] = []

        def add_attribute(
            attr_name: str,
            attr: SchemaAttribute,
            type_annotation: str,
            *,
            required: bool = False,
            optional: bool = False,
            computed: bool = False,
            nested_class_name: str = "",
        ):
            # Plain (non-block) attribute; flags are OR-ed with the schema's own flags.
            dc_field = DcField(
                name=attr_name,
                type_annotation=type_annotation,
                required=required or bool(attr.required),
                optional=optional or bool(attr.optional),
                computed=computed or bool(attr.computed),
                description=attr.description,
                nested_class_name=nested_class_name,
            )
            dc_fields.append(dc_field)

        def add_block_attribute(
            attr_name: str,
            block_type: SchemaBlock,
            required: bool = False,
            optional: bool = False,
            computed: bool = False,
            description: str | None = None,
        ):
            # Nested block: generate a dedicated class and reference it from this field.
            nested_class_name = f"{class_name}_{attr_name.capitalize()}"
            type_annotation = nested_type_annotation(nested_class_name, block_type.nesting_mode)
            dc_field = DcField(
                name=attr_name,
                type_annotation=type_annotation,
                required=required,
                optional=optional,
                computed=computed,
                description=description,
                nested_class_name=nested_class_name,
            )
            class_defs.append(block_to_class(block_type, nested_class_name))
            dc_fields.append(dc_field)

        for attr_name, attr in (block.attributes or {}).items():
            # Skip deprecated attributes and any configured per-resource exclusions.
            if attr.deprecated or attr.deprecated_message or attr_name in config.skip_variables_extra(resource_type):
                if attr.deprecated:
                    logger.info(f"skipping deprecated attribute {attr_name} for {resource_type}")
                continue
            required = bool(attr.required)
            if nested_block := attr.nested_type:
                add_block_attribute(
                    attr_name,
                    nested_block,
                    required=required,
                    optional=bool(attr.optional),
                    computed=bool(attr.computed),
                    description=attr.description,
                )
                continue
            if elem_type_val := attr.element_type:
                elem_py_type = py_type_from_element_type(elem_type_val)
                # Container kind comes from attr.type; element type from element_type.
                match attr.type:
                    case ["map", *_]:
                        py_type = f"Dict[str, {elem_py_type}]"
                    case ["list", *_]:
                        py_type = f"List[{elem_py_type}]"
                    case ["set", *_]:
                        py_type = f"Set[{elem_py_type}]"
                    case _:
                        py_type = elem_py_type
                nested_class_name = ""
                # Non-primitive element types refer to a generated nested class.
                if elem_py_type not in ("str", "float", "bool", "int", "Any", "dict"):
                    nested_class_name = elem_py_type
                add_attribute(attr_name, attr, py_type, nested_class_name=nested_class_name)
            else:
                py_type = type_from_schema_attr(attr, class_name, attr_name)
                add_attribute(attr_name, attr, py_type)

        block_attributes = set()
        for block_type_name, block_type in (block.block_types or {}).items():
            if block_type.deprecated or block_type_name in config.skip_variables_extra(resource_type):
                logger.info(f"skipping deprecated block type {block_type_name}")
                continue
            # min_items > 0 makes a block required even without an explicit flag.
            is_required = (block_type.min_items or 0) > 0 or bool(block_type.required)
            block_attributes.add(block_type_name)
            add_block_attribute(
                block_type_name,
                block_type.block_with_nesting_mode,
                required=is_required,
                optional=bool(block_type.optional),
                description=block_type.description,
            )

        # ClassVar bookkeeping used by ResourceAbs at runtime (block/nested/required/computed sets).
        lines.append(
            f"    {ResourceAbs.BLOCK_ATTRIBUTES_NAME}: ClassVar[Set[str]] = {as_set(sorted(block_attributes))}"
        )
        lines.append(
            f"    {ResourceAbs.NESTED_ATTRIBUTES_NAME}: ClassVar[Set[str]] = {as_set([dc_field.name for dc_field in dc_fields if dc_field.is_nested])}"
        )
        lines.append(
            f"    {ResourceAbs.REQUIRED_ATTRIBUTES_NAME}: ClassVar[Set[str]] = {as_set([dc_field.name for dc_field in dc_fields if dc_field.required])}"
        )
        lines.append(
            f"    {ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME}: ClassVar[Set[str]] = {as_set([dc_field.name for dc_field in dc_fields if dc_field.computed_only])}"
        )
        # Per-resource configured HCL default strings, keyed by field name.
        default_strings = {
            dc_field.name: default_hcl_string
            for dc_field in dc_fields
            if (default_hcl_string := config.attribute_default_hcl_strings(resource_type).get(dc_field.name))
        }
        lines.append(f"    {ResourceAbs.DEFAULTS_HCL_STRINGS_NAME}: ClassVar[dict[str, str]] = {default_strings!r}")

        if not dc_fields:
            # No fields at all: still a valid (empty) dataclass body.
            lines.append("    pass")
            return "\n".join(lines)
        if not config.use_descriptions:
            for dc_field in dc_fields:
                dc_field.description = None
        # Required variables are emitted first (no default), then the optional rest.
        required_vars = config.required_variables(resource_type)
        lines.extend(dc_field.declare_required for dc_field in dc_fields if dc_field.name in required_vars)
        lines.extend(dc_field.declare for dc_field in dc_fields if dc_field.name not in required_vars)
        post_init_lines = [post_init for dc_field in dc_fields if (post_init := dc_field.post_init)]
        if extra_post_init:
            post_init_lines.extend(extra_post_init)
        if post_init_lines:
            lines.append("    def __post_init__(self):")
            lines.extend(post_init_lines)
        lines.extend(["", ""])
        return "\n".join(lines)

    root_class_name = "Resource"
    class_defs.append(block_to_class(schema.block, root_class_name, existing.extra_post_init_lines))

    import_lines = [
        "import json",
        "import sys",
        "from dataclasses import asdict, dataclass, field",
        "from typing import Optional, List, Dict, Any, Set, ClassVar, Union, Iterable",
    ]
    import_lines.extend(existing.extra_import_lines)

    module_str = "\n".join(import_lines + [""] + class_defs)
    return module_str.strip() + "\n"
321
+
322
+
323
# Helper appended verbatim to every generated module: Terraform's external data
# source only accepts string values, so primitives are stringified (booleans as
# lowercase "true"/"false") and None passes through.
_primitive_conversion = """

def format_primitive(value: Union[str, float, bool, int, None]):
    if value is None:
        return None
    if value is True:
        return "true"
    if value is False:
        return "false"
    return str(value)
"""
# Optional debug snippet spliced into the generated main() (see main_entrypoint);
# dumps the JSON output next to the generated script when config.debug_json_logs is set.
_debug_logs = """
    from pathlib import Path
    logs_out = Path(__file__).parent / "logs.json"
    logs_out.write_text(json_str)
"""
339
+
340
+
341
def main_entrypoint(existing: ResourceTypePythonModule, config: ModuleGenConfig) -> str:
    """Render the ``main()`` entrypoint appended to every generated resource module.

    The generated script reads a JSON payload from stdin (``input_json`` key),
    parses it into the resource dataclass, optionally applies the module's own
    ``errors``/``modify_out`` hooks (only when *existing* defines them), and
    prints a flat string-valued JSON dict suitable for a Terraform external
    data source, with non-primitive values re-encoded as JSON strings.
    """
    # Use the hand-written ResourceExt subclass when the existing module has one.
    parse_cls = "ResourceExt" if existing.resource_ext_cls_used else "Resource"
    errors_func_call = r'"\\n".join(errors(resource))' if existing.errors_func_used else '""'
    modify_out_func_call = "\n    resource = modify_out(resource)" if existing.modify_out_func_used else ""
    logs_debug = _debug_logs if config.debug_json_logs else ""
    return (
        _primitive_conversion
        + f"""
def main():
    input_data = sys.stdin.read()
    # Parse the input as JSON
    params = json.loads(input_data)
    input_json = params["input_json"]
    resource = {parse_cls}(**json.loads(input_json))
    error_message = {errors_func_call}
    primitive_types = ({", ".join(t.__name__ for t in primitive_types)}){modify_out_func_call}
    output = {{
        key: format_primitive(value) if value is None or isinstance(value, primitive_types) else json.dumps(value)
        for key, value in asdict(resource).items()
    }}
    output["error_message"] = error_message
    json_str = json.dumps(output){logs_debug}
    print(json_str)

if __name__ == "__main__":
    main()

"""
    )
369
+
370
+
371
def py_file_validate_and_auto_fixes(code: str, error_hint: str = "") -> str:
    """Write *code* to a throwaway file, run ruff format/fix on it, and return the result.

    Args:
        code: Unformatted generated Python source.
        error_hint: Extra context appended to the failure log if ruff rejects the code.

    Raises:
        ShellError: propagated from run_fmt_and_fixes when ruff fails.
    """
    with TemporaryDirectory() as tmp_dir:
        tmp_file = Path(tmp_dir) / "dataclass.py"
        tmp_file.write_text(code)
        # BUG FIX: error_hint was accepted but silently dropped; forward it so
        # ruff failures log the caller's context.
        run_fmt_and_fixes(tmp_file, error_hint)
        return tmp_file.read_text()
377
+
378
+
379
def run_fmt_and_fixes(file_path: Path, error_hint: str = ""):
    """Run ``ruff format`` then ``ruff check --fix`` in *file_path*'s directory.

    Args:
        file_path: The generated python file; its parent directory is the ruff cwd.
        error_hint: Extra context appended to the failure log.

    Returns:
        The file's content after formatting and auto-fixes.

    Raises:
        ShellError: when either ruff invocation fails (after logging the file content).
    """
    tmp_dir = file_path.parent
    # The two invocations differed only in command and log verb; loop instead of
    # duplicating the try/except block. Bare `raise` preserves the traceback.
    for action, cmd in (
        ("format", "ruff format . --line-length 120"),
        ("check", "ruff check --fix ."),
    ):
        try:
            run_and_wait(cmd, cwd=tmp_dir)
        except ShellError:
            logger.exception(f"Failed to {action} dataclass:\n{file_path.read_text()}\n{error_hint}")
            raise
    return file_path.read_text()
392
+
393
+
394
def dataclass_id(cls: type) -> str:
    """Build a structural identity string for a dataclass.

    Two generated classes with identical field names and identical
    computed-only/required markers get the same id, which lets
    simplify_classes merge them.
    """
    all_fields = list(fields(cls))
    id_parts = [",".join(sorted(f.name for f in all_fields))]
    computed = sorted(f.name for f in all_fields if ResourceAbs.is_computed_only(f.name, cls))
    if computed:
        id_parts.append(f"computed={','.join(computed)}")
    required = sorted(f.name for f in all_fields if ResourceAbs.is_required(f.name, cls))
    if required:
        id_parts.append(f"required={','.join(required)}")
    return "|".join(id_parts)
404
+
405
+
406
class NameAlreadyTakenError(Exception):
    """Raised when a shortened/merged dataclass name collides with an existing or already-assigned name."""

    # The original defined an __init__ that only called super().__init__(*args);
    # that is exactly what Exception already does, so it is removed.
409
+
410
+
411
def simplify_classes(py_code: str) -> tuple[str, set[str]]:
    """Deduplicate structurally-identical dataclasses and shorten generated class names.

    Imports *py_code* as a real module to inspect its dataclasses, groups them by
    ``dataclass_id``, merges duplicate groups under one short name, and shortens
    the remaining unique ``Parent_Child`` names to their last segment.

    Returns:
        The rewritten source and the set of newly introduced class names.

    Raises:
        NameAlreadyTakenError: when a shortened name collides with an existing
            or previously assigned name (after one automatic "2"-suffix retry
            inside rename_and_remove_duplicates).
    """
    with TemporaryDirectory() as tmp_dir:
        tmp_file = Path(tmp_dir) / "dataclass.py"
        tmp_file.write_text(py_code)
        # Import the generated code so real dataclass objects can be inspected.
        module = import_from_path("dataclass", tmp_file)
        dataclasses = module_dataclasses(module)
        # Group class names by structural identity; >1 entry means duplicates.
        fields_to_dataclass = defaultdict(list)
        for name, dc in dataclasses.items():
            fields_to_dataclass[dataclass_id(dc)].append(name)
        new_names: set[str] = set()

        def add_new_name(new_name: str) -> None:
            # Reject collisions with both original and freshly assigned names.
            if new_name in new_names or new_name in dataclasses:
                raise NameAlreadyTakenError(f"Duplicate new name: {new_name}")
            new_names.add(new_name)

        if duplicate_classes := {k: v for k, v in fields_to_dataclass.items() if len(v) > 1}:
            for duplicates in duplicate_classes.values():
                py_code, new_name = rename_and_remove_duplicates(duplicates, py_code, add_new_name)
        # Unique classes: shorten hierarchical names like "Resource_Specs" -> "Spec".
        for old_classes in fields_to_dataclass.values():
            if len(old_classes) != 1:
                continue
            cls_name = old_classes[0]
            if "_" not in cls_name:
                continue
            new_name = extract_last_name_part(cls_name)
            py_code = _safe_replace(py_code, cls_name, new_name)
            add_new_name(new_name)
        return py_code, new_names
440
+
441
+
442
+ def _safe_replace(text: str, old: str, new: str) -> str:
443
+ def replacer(match: re.Match) -> str:
444
+ return match[0].replace(old, new)
445
+
446
+ return re.sub(rf"\W({old})\W", replacer, text)
447
+
448
+
449
# Words never singularized when shortening class names ("Aws" is not a plural).
_plural_exception_list = {"Aws"}
# Shortened names remapped to avoid colliding with the generated root class
# (presumably "Resource" is reserved for the root dataclass — see convert_to_dataclass).
_cls_exception_mapping = {
    "Resource": "ResourceElem",
}
453
+
454
+
455
def extract_last_name_part(full_name: str) -> str:
    """Shorten an underscore-joined generated class name to its trailing segment.

    Scans words from the end and keeps everything from the last
    uppercase-starting word onward (all words when none start uppercase),
    pascalizes the result, singularizes it unless exempted, and applies the
    collision remapping.
    """
    words = full_name.split("_")
    start = 0
    for idx in range(len(words) - 1, -1, -1):
        if words[idx][0].isupper():
            start = idx
            break
    pascal = humps.pascalize("_".join(words[start:]))
    candidate = pascal if pascal in _plural_exception_list else singularize(pascal)
    return _cls_exception_mapping.get(candidate, candidate)
466
+
467
+
468
def rename_and_remove_duplicates(
    duplicates: list[str], py_code: str, add_new_name: Callable[[str], None]
) -> tuple[str, str]:
    """Merge structurally-identical classes under one shared short name.

    Picks the longest common substring of the shortened duplicate names,
    registers it via *add_new_name* (retrying once with a "2" suffix on
    collision), rewrites every duplicate reference, and drops all but the
    first class definition.

    Returns:
        The rewritten source and the chosen name.
    """
    short_names = [extract_last_name_part(name) for name in duplicates]
    new_name = longest_common_substring_among_all(short_names)
    try:
        add_new_name(new_name)
    except NameAlreadyTakenError:
        # One retry with a numeric suffix; a second collision propagates.
        new_name += "2"
        add_new_name(new_name)
    for old_name in duplicates:
        py_code = _safe_replace(py_code, old_name, new_name)
    py_code = remove_duplicates(py_code, new_name)
    return py_code, new_name
482
+
483
+
484
def remove_duplicates(py_code: str, new_name) -> str:
    """Keep only the first dataclass definition named *new_name*; cut the rest.

    Extra definitions are removed back-to-front so earlier match offsets stay valid.
    """
    matches = list(dataclass_matches(py_code, new_name))
    logger.info(f"found {len(matches)} matches for {new_name}")
    for extra in reversed(matches[1:]):
        py_code = py_code[: extra.index_start] + py_code[extra.index_end :]
    return py_code
491
+
492
+
493
# Class names excluded from the dataclass-conversion rewrite in convert_and_format
# (passed to ensure_dataclass_use_conversion); presumably the root classes are
# handled separately — verify against ensure_dataclass_use_conversion.
SKIP_FILTER = {"Resource", "ResourceExt"}
494
+
495
+
496
def generate_python_from_schema(
    py_module: ResourceTypePythonModule, schema: ResourceSchema, config: ModuleGenConfig, resource_type: str
) -> str:
    """Produce the full (unformatted) python module source for one resource type.

    Renders the schema to dataclasses, simplifies/deduplicates the generated
    class names, then appends the generated ``main()`` entrypoint.
    """
    raw_code = convert_to_dataclass(schema, py_module, config, resource_type)
    simplified_code, _new_names = simplify_classes(raw_code)
    return f"{simplified_code}\n{main_entrypoint(py_module, config)}"
502
+
503
+
504
def convert_and_format(
    resource_type: str,
    schema: ResourceSchema,
    config: ModuleGenConfig,
    existing_path: Path | None = None,
) -> str:
    """Generate (or regenerate) the python module for *resource_type* and format it.

    When *existing_path* exists, the file is copied to a temp dir and only the
    section between MARKER_START/MARKER_END is replaced, preserving hand-written
    code; otherwise a fresh module is generated wrapped in the markers.

    Returns:
        The formatted module source (after ruff format/fix).
    """
    if existing_path is not None and existing_path.exists():
        # Regeneration path: splice new code between the markers of the existing file.
        py_module = import_resource_type_python_module(resource_type, existing_path)
        with TemporaryDirectory() as tmp_path:
            tmp_file = Path(tmp_path) / f"{resource_type}.py"
            copy(existing_path, tmp_file)
            dataclass_unformatted = generate_python_from_schema(py_module, schema, config, resource_type)
            update_between_markers(tmp_file, dataclass_unformatted, MARKER_START, MARKER_END)
            # Keep the `if __name__ == "__main__"` call as the last statement.
            move_main_call_to_end(tmp_file)
            ensure_dataclass_use_conversion(py_module.dataclasses, tmp_file, SKIP_FILTER)
            return run_fmt_and_fixes(tmp_file)
    # Fresh-generation path: no existing module to preserve.
    existing = ResourceTypePythonModule(resource_type)
    dataclass_unformatted = generate_python_from_schema(existing, schema, config, resource_type)
    return py_file_validate_and_auto_fixes(f"{MARKER_START}\n{dataclass_unformatted}\n{MARKER_END}\n")