atlas-init 0.7.0-py3-none-any.whl → 0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/atlas_init.yaml +1 -0
  3. atlas_init/cli_tf/example_update.py +20 -8
  4. atlas_init/cli_tf/hcl/modifier.py +22 -8
  5. atlas_init/settings/env_vars.py +12 -2
  6. atlas_init/tf_ext/api_call.py +9 -9
  7. atlas_init/tf_ext/args.py +16 -1
  8. atlas_init/tf_ext/gen_examples.py +141 -0
  9. atlas_init/tf_ext/gen_module_readme.py +131 -0
  10. atlas_init/tf_ext/gen_resource_main.py +195 -0
  11. atlas_init/tf_ext/gen_resource_output.py +71 -0
  12. atlas_init/tf_ext/gen_resource_variables.py +159 -0
  13. atlas_init/tf_ext/gen_versions.py +10 -0
  14. atlas_init/tf_ext/models_module.py +454 -0
  15. atlas_init/tf_ext/newres.py +90 -0
  16. atlas_init/tf_ext/plan_diffs.py +140 -0
  17. atlas_init/tf_ext/provider_schema.py +199 -0
  18. atlas_init/tf_ext/py_gen.py +294 -0
  19. atlas_init/tf_ext/schema_to_dataclass.py +522 -0
  20. atlas_init/tf_ext/settings.py +151 -2
  21. atlas_init/tf_ext/tf_dep.py +5 -5
  22. atlas_init/tf_ext/tf_desc_gen.py +53 -0
  23. atlas_init/tf_ext/tf_desc_update.py +0 -0
  24. atlas_init/tf_ext/tf_mod_gen.py +263 -0
  25. atlas_init/tf_ext/tf_mod_gen_provider.py +124 -0
  26. atlas_init/tf_ext/tf_modules.py +5 -4
  27. atlas_init/tf_ext/tf_vars.py +13 -28
  28. atlas_init/tf_ext/typer_app.py +6 -2
  29. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/METADATA +4 -3
  30. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/RECORD +33 -17
  31. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/WHEEL +0 -0
  32. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/entry_points.txt +0 -0
  33. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/licenses/LICENSE +0 -0
atlas_init/tf_ext/models_module.py
@@ -0,0 +1,454 @@
+ from abc import ABC
+ from collections import defaultdict
+ from contextlib import suppress
+ from dataclasses import Field, dataclass, fields
+ from pathlib import Path
+ from types import ModuleType
+ from typing import Any, ClassVar, Iterable, Self, TypeAlias
+
+ from model_lib import Entity, copy_and_validate, dump, parse_dict, parse_model
+ from pydantic import DirectoryPath, model_validator
+ from pydantic import Field as PydanticField
+ from zero_3rdparty.file_utils import ensure_parents_write_text
+ from zero_3rdparty.object_name import as_name
+
+ from atlas_init.tf_ext.plan_diffs import ExamplePlanCheck
+ from atlas_init.tf_ext.py_gen import (
+     ContainerType,
+     PrimitiveTypeError,
+     import_from_path,
+     make_post_init_line_from_field,
+     module_dataclasses,
+     unwrap_type,
+ )
+ from atlas_init.tf_ext.settings import RepoOut, TfExtSettings
+
+ ResourceTypeT: TypeAlias = str
+
+
+ @dataclass
+ class ResourceAbs(ABC):
+     BLOCK_ATTRIBUTES_NAME: ClassVar[str] = "BLOCK_ATTRIBUTES"
+     BLOCK_ATTRIBUTES: ClassVar[set[str]] = set()
+     COMPUTED_ONLY_ATTRIBUTES_NAME: ClassVar[str] = "COMPUTED_ONLY_ATTRIBUTES"
+     COMPUTED_ONLY_ATTRIBUTES: ClassVar[set[str]] = set()
+     DEFAULTS_HCL_STRINGS_NAME: ClassVar[str] = "DEFAULTS_HCL_STRINGS"
+     DEFAULTS_HCL_STRINGS: ClassVar[dict[str, str]] = {}
+     NESTED_ATTRIBUTES_NAME: ClassVar[str] = "NESTED_ATTRIBUTES"
+     NESTED_ATTRIBUTES: ClassVar[set[str]] = set()
+     REQUIRED_ATTRIBUTES_NAME: ClassVar[str] = "REQUIRED_ATTRIBUTES"
+     REQUIRED_ATTRIBUTES: ClassVar[set[str]] = set()
+     SKIP_VARIABLES_NAME: ClassVar[str] = "SKIP_VARIABLES"
+     SKIP_VARIABLES: ClassVar[set[str]] = set()
+
+     @staticmethod
+     def is_block(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.BLOCK_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def is_required(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.REQUIRED_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def is_computed_only(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def is_nested(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.NESTED_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def default_hcl_string(field_name: str, some_cls: type) -> str | None:
+         return getattr(some_cls, ResourceAbs.DEFAULTS_HCL_STRINGS_NAME, {}).get(field_name)
+
+     @staticmethod
+     def skip_variable(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.SKIP_VARIABLES_NAME, set())
+
+
+ def as_import_line(name: str) -> str:
+     from_part, name_part = name.rsplit(".", maxsplit=1)
+     return f"from {from_part} import {name_part}"
+
+
+ class ResourceGenConfig(Entity):
+     name: str
+     use_single_variable: bool = False
+     required_variables: set[str] = PydanticField(default_factory=set)
+     skip_variables_extra: set[str] = PydanticField(default_factory=set)
+     attribute_default_hcl_strings: dict[str, str] = PydanticField(default_factory=dict)
+     include_id_field: bool = False
+
+     @model_validator(mode="after")
+     def add_id_as_skip_variable(self) -> Self:
+         if not self.include_id_field:
+             self.skip_variables_extra.add("id")  # SDKv2 Adds a computed+optional `id` field
+         return self
+
+     def single_variable_version(self) -> Self:
+         assert not self.use_single_variable, "use_single_variable must be False to create a single variable version"
+         return copy_and_validate(self, use_single_variable=True)
+
+
+ def as_provider_name(provider_path: str) -> str:
+     return provider_path.rsplit("/", maxsplit=1)[-1]
+
+
+ class ProviderGenConfig(Entity):
+     provider_path: str
+     resources: list[ResourceGenConfig] = PydanticField(default_factory=list)
+     settings: TfExtSettings = PydanticField(default_factory=TfExtSettings.from_env)
+     last_gen_sha: str = ""
+
+     def config_dump(self) -> dict[str, Any]:
+         return {
+             "provider_path": self.provider_path,
+             "resources": [r.model_dump(exclude_defaults=True, exclude_unset=True) for r in self.resources],
+             "last_gen_sha": self.last_gen_sha,
+         }
+
+     @property
+     def provider_name(self) -> str:
+         return self.provider_path.rsplit("/", maxsplit=1)[-1]
+
+     def resource_types(self) -> list[str]:
+         return [r.name for r in self.resources]
+
+     def resource_config_or_none(self, resource_type: str) -> ResourceGenConfig | None:
+         return next((r for r in self.resources if r.name == resource_type), None)
+
+
+ class ModuleGenConfig(Entity):
+     CONFIG_FILENAME: ClassVar[str] = "config.yaml"
+     FILENAME_EXAMPLE_CHECKS: ClassVar[str] = "example_plan_checks.yaml"
+     FILENAME_EXAMPLES_TEST: ClassVar[str] = "examples_test.py"
+
+     @classmethod
+     def skip_copy(cls, src_file: Path) -> bool:
+         return (
+             src_file.stem.endswith("_test")
+             or src_file.name == "__init__.py"
+             or src_file.name in {cls.CONFIG_FILENAME, cls.FILENAME_EXAMPLE_CHECKS, cls.FILENAME_EXAMPLES_TEST}
+         )
+
+     name: str = ""
+     resources: list[ResourceGenConfig] = PydanticField(default_factory=list)
+     settings: TfExtSettings = PydanticField(default_factory=TfExtSettings.from_env)
+     in_dir: Path | None = None
+     out_dir: Path | None = None
+     dataclass_out_dir: Path | None = None
+     skip_python: bool = False
+     debug_json_logs: bool = False
+     example_plan_checks: list[ExamplePlanCheck] = PydanticField(default_factory=list)
+     use_descriptions: bool = False
+     inputs_json_hcl_extras: list[str] = PydanticField(default_factory=list)
+
+     @model_validator(mode="after")
+     def set_defaults(self) -> Self:
+         if not self.name:
+             assert self.resource_types, "must set either name or resource_types"
+             self.name = self.resource_types[0]
+         return self
+
+     @property
+     def resource_types(self) -> list[str]:
+         return [r.name for r in self.resources]
+
+     def resource_config(self, resource_type: str) -> ResourceGenConfig:
+         config = next((r for r in self.resources if r.name == resource_type), None)
+         if config is None:
+             raise ValueError(f"module config {self.name} doesn't have: {resource_type}")
+         return config
+
+     @classmethod
+     def from_repo_out(cls, resource_type: str, provider_config: ProviderGenConfig, repo_out: RepoOut) -> Self:
+         resource_config = provider_config.resource_config_or_none(resource_type) or ResourceGenConfig(
+             name=resource_type
+         )
+         return cls(
+             name=resource_type,
+             resources=[resource_config],
+             settings=provider_config.settings,
+             in_dir=None,
+             out_dir=repo_out.resource_module_path(provider_config.provider_name, resource_type),
+             dataclass_out_dir=repo_out.py_provider_module(provider_config.provider_name),
+         )
+
+     @classmethod
+     def from_paths(cls, name: str, in_dir: DirectoryPath, out_dir: DirectoryPath, settings: TfExtSettings) -> Self:
+         config_path = in_dir / name / f"{cls.CONFIG_FILENAME}"
+         assert config_path.exists(), f"{config_path} does not exist"
+         out_dir = out_dir or settings.modules_out_path
+         assert out_dir.exists(), f"{out_dir} does not exist"
+         config = parse_model(config_path, t=cls)
+         config.out_dir = out_dir / name
+         config.in_dir = in_dir / name
+         config.settings = settings
+         return config
+
+     def skip_variables_extra(self, resource_type: str) -> set[str]:
+         return next((r.skip_variables_extra for r in self.resources if r.name == resource_type), set())
+
+     def required_variables(self, resource_type: str) -> set[str]:
+         return next((r.required_variables for r in self.resources if r.name == resource_type), set())
+
+     def attribute_default_hcl_strings(self, resource_type: str) -> dict[str, str]:
+         return next((r.attribute_default_hcl_strings for r in self.resources if r.name == resource_type), {})
+
+     @property
+     def module_out_path(self) -> Path:
+         if out_dir := self.out_dir:
+             return out_dir
+         parent_path = self.settings.modules_out_path
+         return parent_path / self.name
+
+     @property
+     def example_plan_checks_path(self) -> Path:
+         assert self.in_dir, "in_dir is required to find example checks"
+         return self.in_dir / ModuleGenConfig.FILENAME_EXAMPLE_CHECKS
+
+     @property
+     def examples_test_path(self) -> Path:
+         assert self.in_dir, "in_dir is required to find examples test"
+         return self.in_dir / ModuleGenConfig.FILENAME_EXAMPLES_TEST
+
+     def dataclass_path(self, resource_type: str) -> Path:
+         # Must align with RepoOut.dataclass_path
+         if dataclass_out_dir := self.dataclass_out_dir:
+             return dataclass_out_dir / f"{resource_type}.py"
+         return self.module_out_path / f"{resource_type}.py"
+
+     def main_tf_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}.tf"
+         return self.module_out_path / "main.tf"
+
+     def variables_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}_variables.tf"
+         return self.module_out_path / "variables.tf"
+
+     def variablesx_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}_variablesx.tf"
+         return self.module_out_path / "variablesx.tf"
+
+     def output_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}_output.tf"
+         return self.module_out_path / "output.tf"
+
+     def output_name(self, resource_type: str, *attr_name: str) -> str:
+         attr_single = "_".join(attr_name)
+         if len(self.resource_types) > 1:
+             return f"{resource_type}_{attr_single}"
+         return attr_single
+
+     def resolve_resource_type(self, path: Path) -> ResourceTypeT:
+         if len(self.resource_types) == 1:
+             return self.resource_types[0]
+         for resource_type in self.resource_types:
+             if path.name.startswith(resource_type):
+                 return resource_type
+         raise ValueError(f"Could not resolve resource type for path {path}")
+
+     def readme_path(self) -> Path:
+         return self.module_out_path / "README.md"
+
+     @property
+     def examples_path(self) -> Path:
+         return self.module_out_path / "examples"
+
+     def example_name(self, name: str, example_nr: int) -> str:
+         return f"{example_nr:02d}_{name}"
+
+     def example_path(self, name: str) -> Path:
+         return self.examples_path / name
+
+     def terraform_docs_config_path(self) -> Path:
+         return self.module_out_path / ".terraform-docs.yml"
+
+
+ @dataclass
+ class ResourceTypePythonModule:
+     resource_type: str
+     resource: type[ResourceAbs] | None = None
+     resource_ext: type[ResourceAbs] | None = None
+     module: ModuleType | None = None
+
+     @property
+     def dataclasses(self) -> dict[str, type]:
+         if not self.module:
+             return {}
+         return module_dataclasses(self.module)
+
+     @property
+     def resource_ext_cls_used(self) -> bool:
+         return self.resource_ext is not None
+
+     @property
+     def errors_func_used(self) -> bool:
+         return self.module is not None and getattr(self.module, "errors", None) is not None
+
+     @property
+     def modify_out_func_used(self) -> bool:
+         return self.module is not None and hasattr(self.module, "modify_out")
+
+     @property
+     def extra_post_init_lines(self) -> list[str]:
+         if self.resource_ext is None:
+             return []
+         return [make_post_init_line_from_field(extra_field) for extra_field in self.extra_fields]
+
+     @property
+     def base_fields(self) -> list[Field]:
+         if self.resource is None:
+             return []
+         return list(fields(self.resource))
+
+     @property
+     def base_field_names(self) -> list[str]:
+         return sorted(f.name for f in self.base_fields)
+
+     @property
+     def all_fields(self) -> list[Field]:
+         return self.base_fields + self.extra_fields
+
+     @property
+     def all_field_names(self) -> list[str]:
+         return sorted(f.name for f in self.all_fields)
+
+     @property
+     def base_field_names_computed(self) -> list[str]:
+         if self.resource is None:
+             return []
+         computed = getattr(self.resource, ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME, set())
+         return sorted(name for name in self.base_field_names if name in computed)
+
+     @property
+     def base_field_names_not_computed(self) -> list[str]:
+         computed = getattr(self.resource, ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME, set())
+         return sorted(name for name in self.base_field_names if name not in computed)
+
+     @property
+     def extra_fields(self) -> list[Field]:
+         if self.resource is None or self.resource_ext is None:
+             return []
+         base_fields = {f.name for f in self.base_fields}
+         return sorted(
+             (
+                 f
+                 for f in fields(self.resource_ext)
+                 if f.name not in base_fields and not ResourceAbs.skip_variable(f.name, self.resource_ext)
+             ),
+             key=lambda f: f.name,
+         )
+
+     @property
+     def extra_fields_names(self) -> list[str]:
+         return [f.name for f in self.extra_fields]
+
+     @property
+     def extra_import_lines(self) -> list[str]:
+         module = self.module
+         if not module:
+             return []
+         return [
+             as_import_line(as_name(value))
+             for key, value in vars(module).items()
+             if not key.startswith("_") and not as_name(value).startswith(("__", self.resource_type))
+         ]
+
+     @property
+     def all_skip_variables(self) -> set[str]:
+         skip_vars = set()
+         if self.resource:
+             skip_vars.update(getattr(self.resource, ResourceAbs.SKIP_VARIABLES_NAME, set()))
+         if self.resource_ext:
+             skip_vars.update(getattr(self.resource_ext, ResourceAbs.SKIP_VARIABLES_NAME, set()))
+         return skip_vars
+
+     @property
+     def nested_field_types(self) -> Iterable[tuple[str, ContainerType[ResourceAbs]]]:
+         cls = self.resource_ext or self.resource
+         if not cls:
+             return []
+         yield from self.container_types(cls)
+
+     @staticmethod
+     def container_types(data_class: type[ResourceAbs]) -> Iterable[tuple[str, ContainerType[ResourceAbs]]]:
+         for field in fields(data_class):
+             if ResourceAbs.is_nested(field.name, data_class):
+                 with suppress(PrimitiveTypeError):
+                     container_type = unwrap_type(field)
+                     yield field.name, container_type
+
+
+ class MissingDescriptionError(Exception):
+     def __init__(self, attribute_name: str, resource_type: ResourceTypeT):
+         super().__init__(f"Missing description for attribute {attribute_name} in resource type {resource_type}")
+         self.attribute_name = attribute_name
+         self.resource_type = resource_type
+
+
+ class AttributeDescriptions(Entity):
+     manual_nested: dict[ResourceTypeT, dict[str, str]] = PydanticField(default_factory=lambda: defaultdict(dict))
+     generated_nested: dict[ResourceTypeT, dict[str, str]] = PydanticField(default_factory=lambda: defaultdict(dict))
+     manual_flat: dict[str, str] = PydanticField(default_factory=dict)
+     generated_flat: dict[str, str] = PydanticField(default_factory=dict)
+
+     def resolve_description(self, attribute_name: str, resource_type: ResourceTypeT) -> str:
+         lookup_order = [
+             self.manual_nested.get(resource_type, {}),
+             self.generated_nested.get(resource_type, {}),
+             self.manual_flat,
+             self.generated_flat,
+         ]
+         try:
+             return next(desc for desc_dict in lookup_order if (desc := desc_dict.get(attribute_name)))
+         except StopIteration as e:
+             raise MissingDescriptionError(attribute_name, resource_type) from e
+
+
+ def parse_attribute_descriptions(settings: TfExtSettings) -> AttributeDescriptions:
+     return AttributeDescriptions(
+         manual_nested=parse_dict(settings.attribute_resource_descriptions_manual_file_path)
+         if settings.attribute_resource_descriptions_manual_file_path.exists()
+         else {},
+         generated_nested=parse_dict(settings.attribute_resource_descriptions_file_path)
+         if settings.attribute_resource_descriptions_file_path.exists()
+         else {},
+         manual_flat=parse_dict(settings.attribute_description_manual_file_path)
+         if settings.attribute_description_manual_file_path.exists()
+         else {},
+         generated_flat=parse_dict(settings.attribute_description_file_path)
+         if settings.attribute_description_file_path.exists()
+         else {},
+     )
+
+
+ def store_updated_attribute_description(
+     existing: AttributeDescriptions,
+     settings: TfExtSettings,
+     attribute_name: str,
+     description: str,
+     resource_type: ResourceTypeT = "",
+ ):
+     if resource_type:
+         out_path = settings.attribute_resource_descriptions_manual_file_path
+         existing.manual_nested.setdefault(resource_type, {})[attribute_name] = description
+         out_yaml = dump(existing.manual_nested, "yaml")
+     else:
+         out_path = settings.attribute_description_manual_file_path
+         existing.manual_flat[attribute_name] = description
+         out_yaml = dump(existing.manual_flat, "yaml")
+     ensure_parents_write_text(out_path, out_yaml)
+
+
+ def import_resource_type_python_module(resource_type: str, generated_dataclass_path: Path) -> ResourceTypePythonModule:
+     module = import_from_path(resource_type, generated_dataclass_path)
+     assert module
+     resource = getattr(module, "Resource")
+     assert resource
+     resource_ext = getattr(module, "ResourceExt", None)
+     return ResourceTypePythonModule(resource_type, resource, resource_ext, module)
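For orientation, a minimal usage sketch of the configuration classes added above; the resource type and output directory are made up for illustration, and it assumes TfExtSettings.from_env() can build settings from the current environment (the default factory used by ModuleGenConfig).

from pathlib import Path

from atlas_init.tf_ext.models_module import ModuleGenConfig, ResourceGenConfig

# Hypothetical resource type and output directory, for illustration only.
config = ModuleGenConfig(
    name="mongodbatlas_project",
    resources=[ResourceGenConfig(name="mongodbatlas_project", required_variables={"org_id"})],
    out_dir=Path("/tmp/modules/mongodbatlas_project"),
)
print(config.main_tf_path("mongodbatlas_project"))  # <out_dir>/main.tf for a single resource type
print(config.variables_path("mongodbatlas_project"))  # <out_dir>/variables.tf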
atlas_init/tf_ext/newres.py
@@ -0,0 +1,90 @@
+ import logging
+ from pathlib import Path
+ from ask_shell import run_and_wait
+ from ask_shell.settings import clean_dir
+ from model_lib import dump
+ from zero_3rdparty import humps
+ from zero_3rdparty.file_utils import ensure_parents_write_text
+ from atlas_init.tf_ext.provider_schema import AtlasSchemaInfo, parse_atlas_schema
+
+ logger = logging.getLogger(__name__)
+
+
+ def prepare_newres(path: Path):
+     if not path.exists():
+         path.parent.mkdir(exist_ok=True, parents=True)
+         run_and_wait(f"git clone https://github.com/lonegunmanb/newres.git {path.name}", cwd=path.parent)
+     schema = parse_atlas_schema()
+     modify_newres(path, schema)
+     run_and_wait("go fmt ./...", cwd=path)
+
+
+ def _template_resource_go(resource_type: str, resource_type_schema_json: str) -> str:
+     json_backticks_escaped = resource_type_schema_json.replace("`", '`+"`"+`')
+     return f"""
+ package custom
+ import (
+ "encoding/json"
+ tfjson "github.com/hashicorp/terraform-json"
+ )
+
+ const {humps.camelize(resource_type)} = `
+ {json_backticks_escaped}
+ `
+ func {humps.camelize(resource_type)}Schema() *tfjson.Schema {{
+ \tvar result tfjson.Schema
+ \t_ = json.Unmarshal([]byte({humps.camelize(resource_type)}), &result)
+ \treturn &result
+ }}
+
+ """
+
+
+ def _register_go(resources: list[str]) -> str:
+     resources_key_assignments = "\n".join(
+         f'        Resources["{resource}"] = {humps.camelize(resource)}Schema()' for resource in resources
+     )
+     return f"""
+ package custom
+ import (
+ tfjson "github.com/hashicorp/terraform-json"
+ )
+ var Resources map[string]*tfjson.Schema
+
+ func init() {{
+ Resources = make(map[string]*tfjson.Schema)
+ {resources_key_assignments}
+ }}
+
+ """
+
+
+ def modify_newres(new_res_path: Path, schema: AtlasSchemaInfo):
+     custom_resource_dir = new_res_path / "pkg/custom"
+     clean_dir(custom_resource_dir)
+     for resource_type, resource_type_schema in schema.raw_resource_schema.items():
+         schema_json = dump(resource_type_schema, format="pretty_json")
+         resource_type_go = _template_resource_go(resource_type, schema_json)
+         resource_type_file = custom_resource_dir / f"{resource_type}.go"
+         ensure_parents_write_text(resource_type_file, resource_type_go)
+     register_go = _register_go(schema.resource_types)
+     register_file = custom_resource_dir / "register.go"
+     ensure_parents_write_text(register_file, register_go)
+     logger.info(f"Custom resource files written to {custom_resource_dir}")
+     add_to_register_go(new_res_path)
+
+
+ def add_to_register_go(new_res_path: Path):
+     register_go = new_res_path / "pkg/resource_register.go"
+     in_text = register_go.read_text()
+     replacements = {
+         "import (": 'import (\n\t"github.com/lonegunmanb/newres/v3/pkg/custom"',
+         "resources := []map[string]*tfjson.Schema{": "resources := []map[string]*tfjson.Schema{\n\t\tcustom.Resources,",
+     }
+     out_text = in_text
+     for old, new in replacements.items():
+         if new in out_text:
+             continue
+         out_text = out_text.replace(old, new)
+     ensure_parents_write_text(register_go, out_text)
+     logger.info(f"Added custom resources to {register_go}")
+ logger.info(f"Added custom resources to {register_go}")
@@ -0,0 +1,140 @@
1
+ from functools import total_ordering
2
+ from pathlib import Path
3
+ from tempfile import TemporaryDirectory
4
+ from typing import Any
5
+
6
+ from model_lib import Entity, dump, parse_model
7
+ from pydantic import Field, field_validator
8
+ from zero_3rdparty.file_utils import ensure_parents_write_text
9
+
10
+
11
+ PLAN_VARIABLES_FILENAME = "variables.tfvars.json"
12
+
13
+
14
+ class PlannedResource(Entity):
15
+ address: str
16
+ mode: str
17
+ type: str
18
+ name: str
19
+ provider_name: str
20
+ schema_version: int
21
+ values: dict[str, Any]
22
+ sensitive_values: dict[str, Any]
23
+
24
+
25
+ class VariableUsage(Entity):
26
+ value: Any
27
+
28
+
29
+ class OutputUsage(Entity):
30
+ resource: str # address to resource
31
+ attribute: list[str] # attribute name, only seen length 1 so far
32
+
33
+
34
+ def flatten_dict(d: dict[str, Any] | list[dict[str, Any]], current_address: str = "") -> dict[str, Any]:
35
+ response_dict = {}
36
+ if isinstance(d, list):
37
+ for item in d:
38
+ response_dict |= flatten_dict(item, current_address)
39
+ return response_dict
40
+ for key, value in d.items():
41
+ if key == "resources":
42
+ response_dict[current_address] = value
43
+ continue
44
+ if not isinstance(value, dict | list):
45
+ continue
46
+ response_dict |= flatten_dict(value, f"{current_address}.{key}".lstrip("."))
47
+ return response_dict
48
+
49
+
50
+ class PlanOutput(Entity):
51
+ planned_values: dict[str, list[PlannedResource]]
52
+ format_version: str # of the plan
53
+ terraform_version: str # used to generate the plan
54
+ variables: dict[str, VariableUsage]
55
+ configuration: dict[str, Any]
56
+ relevant_attributes: dict[str, OutputUsage] | list[OutputUsage] = Field(default_factory=list)
57
+
58
+ @field_validator("planned_values", mode="before")
59
+ def unpack_planned_values(cls, v: dict[str, Any]):
60
+ return flatten_dict(v)
61
+
62
+
63
+ def parse_plan_output(plan_json_path: Path) -> PlanOutput:
64
+ return parse_model(plan_json_path, t=PlanOutput)
65
+
66
+
67
+ def resource_type_name_filename(resource_type: str, resource_name: str) -> str:
68
+ return f"{resource_type}_{resource_name}.yaml"
69
+
70
+
71
+ def dump_plan_output_resources(output_dir: Path, plan_output: PlanOutput) -> list[Path]:
72
+ output_files: dict[str, Path] = {}
73
+ for resources in plan_output.planned_values.values():
74
+ for resource in resources:
75
+ resource_type_name = resource_type_name_filename(resource.type, resource.name)
76
+ output_file = output_dir / resource_type_name
77
+ assert resource_type_name not in output_files, f"Duplicate name {resource_type_name} in plan output"
78
+ output_files[resource_type_name] = output_file
79
+ ensure_parents_write_text(output_file, dump(resource.values, "yaml"))
80
+ return list(output_files.values())
81
+
82
+
83
+ def dump_plan_output_variables(output_dir: Path, plan_output: PlanOutput) -> Path:
84
+ variable_values = {name: value.value for name, value in plan_output.variables.items()}
85
+ output_file = output_dir / PLAN_VARIABLES_FILENAME
86
+ ensure_parents_write_text(output_file, dump(variable_values, "pretty_json"))
87
+ return output_file
88
+
89
+
90
+ def read_variables_path(module_path: Path) -> Path:
91
+ return module_path / PLAN_VARIABLES_FILENAME
92
+
93
+
94
+ class ResourceTypeName(Entity):
95
+ type: str
96
+ name: str
97
+
98
+
99
+ @total_ordering
100
+ class ResourceCheck(Entity):
101
+ actual: ResourceTypeName
102
+ expected_resource: ResourceTypeName
103
+
104
+ def __lt__(self, other) -> bool:
105
+ if not isinstance(other, ResourceCheck):
106
+ raise TypeError
107
+ return (self.actual.type, self.actual.name) < (other.actual.type, other.actual.name)
108
+
109
+ def __str__(self) -> str:
110
+ return f"Expecting Resource Match {self.expected_resource.type}.{self.expected_resource.name} == {self.actual.type}.{self.actual.name}"
111
+
112
+
113
+ class ExamplePlanCheck(Entity):
114
+ resource_checks: list[ResourceCheck] = Field(default_factory=list)
115
+ example_name: str
116
+ expected_output_dir_name: str
117
+
118
+
119
+ def generate_expected_actual(
120
+ stored_plan_outputs: Path, example_check: ExamplePlanCheck, plan_output: PlanOutput
121
+ ) -> tuple[str, str]:
122
+ expected_output_path = stored_plan_outputs / example_check.expected_output_dir_name
123
+ assert expected_output_path.exists(), f"Expected output directory {expected_output_path} does not exist"
124
+ expected_content, actual_content = [], []
125
+ with TemporaryDirectory() as temp_dir:
126
+ out_dir = Path(temp_dir)
127
+ dump_plan_output_resources(out_dir, plan_output)
128
+ for check in sorted(example_check.resource_checks):
129
+ check_header = str(check)
130
+ expected_file = expected_output_path / resource_type_name_filename(
131
+ check.expected_resource.type, check.expected_resource.name
132
+ )
133
+ actual_file = out_dir / resource_type_name_filename(check.actual.type, check.actual.name)
134
+ if not expected_file.exists():
135
+ raise ValueError(f"Expected file {expected_file} doesn't exist!")
136
+ if not actual_file.exists():
137
+ raise ValueError(f"Actual file {actual_file} doesn't exist!")
138
+ expected_content.append(f"\n{check_header}\n{expected_file.read_text()}")
139
+ actual_content.append(f"\n{check_header}\n{actual_file.read_text()}")
140
+ return "\n".join(expected_content), "\n".join(actual_content)