atlas-init 0.7.0__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/atlas_init.yaml +1 -0
  3. atlas_init/cli_tf/example_update.py +20 -8
  4. atlas_init/cli_tf/hcl/modifier.py +22 -8
  5. atlas_init/settings/env_vars.py +12 -2
  6. atlas_init/tf_ext/api_call.py +9 -9
  7. atlas_init/tf_ext/args.py +16 -1
  8. atlas_init/tf_ext/gen_examples.py +141 -0
  9. atlas_init/tf_ext/gen_module_readme.py +131 -0
  10. atlas_init/tf_ext/gen_resource_main.py +195 -0
  11. atlas_init/tf_ext/gen_resource_output.py +71 -0
  12. atlas_init/tf_ext/gen_resource_variables.py +162 -0
  13. atlas_init/tf_ext/gen_versions.py +10 -0
  14. atlas_init/tf_ext/models_module.py +455 -0
  15. atlas_init/tf_ext/newres.py +90 -0
  16. atlas_init/tf_ext/plan_diffs.py +140 -0
  17. atlas_init/tf_ext/provider_schema.py +199 -0
  18. atlas_init/tf_ext/py_gen.py +294 -0
  19. atlas_init/tf_ext/schema_to_dataclass.py +522 -0
  20. atlas_init/tf_ext/settings.py +151 -2
  21. atlas_init/tf_ext/tf_dep.py +5 -5
  22. atlas_init/tf_ext/tf_desc_gen.py +53 -0
  23. atlas_init/tf_ext/tf_desc_update.py +0 -0
  24. atlas_init/tf_ext/tf_mod_gen.py +263 -0
  25. atlas_init/tf_ext/tf_mod_gen_provider.py +124 -0
  26. atlas_init/tf_ext/tf_modules.py +5 -4
  27. atlas_init/tf_ext/tf_vars.py +13 -28
  28. atlas_init/tf_ext/typer_app.py +6 -2
  29. {atlas_init-0.7.0.dist-info → atlas_init-0.8.1.dist-info}/METADATA +4 -3
  30. {atlas_init-0.7.0.dist-info → atlas_init-0.8.1.dist-info}/RECORD +33 -17
  31. {atlas_init-0.7.0.dist-info → atlas_init-0.8.1.dist-info}/WHEEL +0 -0
  32. {atlas_init-0.7.0.dist-info → atlas_init-0.8.1.dist-info}/entry_points.txt +0 -0
  33. {atlas_init-0.7.0.dist-info → atlas_init-0.8.1.dist-info}/licenses/LICENSE +0 -0
atlas_init/tf_ext/models_module.py (new file)
@@ -0,0 +1,455 @@
+ from abc import ABC
+ from collections import defaultdict
+ from contextlib import suppress
+ from dataclasses import Field, dataclass, fields
+ from pathlib import Path
+ from types import ModuleType
+ from typing import Any, ClassVar, Iterable, Self, TypeAlias
+
+ from model_lib import Entity, copy_and_validate, dump, parse_dict, parse_model
+ from pydantic import DirectoryPath, model_validator
+ from pydantic import Field as PydanticField
+ from zero_3rdparty.file_utils import ensure_parents_write_text
+ from zero_3rdparty.object_name import as_name
+
+ from atlas_init.tf_ext.plan_diffs import ExamplePlanCheck
+ from atlas_init.tf_ext.py_gen import (
+     ContainerType,
+     PrimitiveTypeError,
+     import_from_path,
+     make_post_init_line_from_field,
+     module_dataclasses,
+     unwrap_type,
+ )
+ from atlas_init.tf_ext.settings import RepoOut, TfExtSettings
+
+ ResourceTypeT: TypeAlias = str
+
+
+ @dataclass
+ class ResourceAbs(ABC):
+     BLOCK_ATTRIBUTES_NAME: ClassVar[str] = "BLOCK_ATTRIBUTES"
+     BLOCK_ATTRIBUTES: ClassVar[set[str]] = set()
+     COMPUTED_ONLY_ATTRIBUTES_NAME: ClassVar[str] = "COMPUTED_ONLY_ATTRIBUTES"
+     COMPUTED_ONLY_ATTRIBUTES: ClassVar[set[str]] = set()
+     DEFAULTS_HCL_STRINGS_NAME: ClassVar[str] = "DEFAULTS_HCL_STRINGS"
+     DEFAULTS_HCL_STRINGS: ClassVar[dict[str, str]] = {}
+     NESTED_ATTRIBUTES_NAME: ClassVar[str] = "NESTED_ATTRIBUTES"
+     NESTED_ATTRIBUTES: ClassVar[set[str]] = set()
+     REQUIRED_ATTRIBUTES_NAME: ClassVar[str] = "REQUIRED_ATTRIBUTES"
+     REQUIRED_ATTRIBUTES: ClassVar[set[str]] = set()
+     SKIP_VARIABLES_NAME: ClassVar[str] = "SKIP_VARIABLES"
+     SKIP_VARIABLES: ClassVar[set[str]] = set()
+
+     @staticmethod
+     def is_block(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.BLOCK_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def is_required(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.REQUIRED_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def is_computed_only(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def is_nested(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.NESTED_ATTRIBUTES_NAME, set())
+
+     @staticmethod
+     def default_hcl_string(field_name: str, some_cls: type) -> str | None:
+         return getattr(some_cls, ResourceAbs.DEFAULTS_HCL_STRINGS_NAME, {}).get(field_name)
+
+     @staticmethod
+     def skip_variable(field_name: str, some_cls: type) -> bool:
+         return field_name in getattr(some_cls, ResourceAbs.SKIP_VARIABLES_NAME, set())
+
+
+ def as_import_line(name: str) -> str:
+     from_part, name_part = name.rsplit(".", maxsplit=1)
+     return f"from {from_part} import {name_part}"
+
+
+ class ResourceGenConfig(Entity):
+     name: str
+     use_single_variable: bool = False
+     use_opt_in_required_variables: bool = False
+     required_variables: set[str] = PydanticField(default_factory=set)
+     skip_variables_extra: set[str] = PydanticField(default_factory=set)
+     attribute_default_hcl_strings: dict[str, str] = PydanticField(default_factory=dict)
+     include_id_field: bool = False
+
+     @model_validator(mode="after")
+     def add_id_as_skip_variable(self) -> Self:
+         if not self.include_id_field:
+             self.skip_variables_extra.add("id")  # SDKv2 Adds a computed+optional `id` field
+         return self
+
+     def single_variable_version(self) -> Self:
+         assert not self.use_single_variable, "use_single_variable must be False to create a single variable version"
+         return copy_and_validate(self, use_single_variable=True)
+
+
+ def as_provider_name(provider_path: str) -> str:
+     return provider_path.rsplit("/", maxsplit=1)[-1]
+
+
+ class ProviderGenConfig(Entity):
+     provider_path: str
+     resources: list[ResourceGenConfig] = PydanticField(default_factory=list)
+     settings: TfExtSettings = PydanticField(default_factory=TfExtSettings.from_env)
+     last_gen_sha: str = ""
+
+     def config_dump(self) -> dict[str, Any]:
+         return {
+             "provider_path": self.provider_path,
+             "resources": [r.model_dump(exclude_defaults=True, exclude_unset=True) for r in self.resources],
+             "last_gen_sha": self.last_gen_sha,
+         }
+
+     @property
+     def provider_name(self) -> str:
+         return self.provider_path.rsplit("/", maxsplit=1)[-1]
+
+     def resource_types(self) -> list[str]:
+         return [r.name for r in self.resources]
+
+     def resource_config_or_none(self, resource_type: str) -> ResourceGenConfig | None:
+         return next((r for r in self.resources if r.name == resource_type), None)
+
+
+ class ModuleGenConfig(Entity):
+     CONFIG_FILENAME: ClassVar[str] = "config.yaml"
+     FILENAME_EXAMPLE_CHECKS: ClassVar[str] = "example_plan_checks.yaml"
+     FILENAME_EXAMPLES_TEST: ClassVar[str] = "examples_test.py"
+
+     @classmethod
+     def skip_copy(cls, src_file: Path) -> bool:
+         return (
+             src_file.stem.endswith("_test")
+             or src_file.name == "__init__.py"
+             or src_file.name in {cls.CONFIG_FILENAME, cls.FILENAME_EXAMPLE_CHECKS, cls.FILENAME_EXAMPLES_TEST}
+         )
+
+     name: str = ""
+     resources: list[ResourceGenConfig] = PydanticField(default_factory=list)
+     settings: TfExtSettings = PydanticField(default_factory=TfExtSettings.from_env)
+     in_dir: Path | None = None
+     out_dir: Path | None = None
+     dataclass_out_dir: Path | None = None
+     skip_python: bool = False
+     debug_json_logs: bool = False
+     example_plan_checks: list[ExamplePlanCheck] = PydanticField(default_factory=list)
+     use_descriptions: bool = False
+     inputs_json_hcl_extras: list[str] = PydanticField(default_factory=list)
+
+     @model_validator(mode="after")
+     def set_defaults(self) -> Self:
+         if not self.name:
+             assert self.resource_types, "must set either name or resource_types"
+             self.name = self.resource_types[0]
+         return self
+
+     @property
+     def resource_types(self) -> list[str]:
+         return [r.name for r in self.resources]
+
+     def resource_config(self, resource_type: str) -> ResourceGenConfig:
+         config = next((r for r in self.resources if r.name == resource_type), None)
+         if config is None:
+             raise ValueError(f"module config {self.name} doesn't have: {resource_type}")
+         return config
+
+     @classmethod
+     def from_repo_out(cls, resource_type: str, provider_config: ProviderGenConfig, repo_out: RepoOut) -> Self:
+         resource_config = provider_config.resource_config_or_none(resource_type) or ResourceGenConfig(
+             name=resource_type
+         )
+         return cls(
+             name=resource_type,
+             resources=[resource_config],
+             settings=provider_config.settings,
+             in_dir=None,
+             out_dir=repo_out.resource_module_path(provider_config.provider_name, resource_type),
+             dataclass_out_dir=repo_out.py_provider_module(provider_config.provider_name),
+         )
+
+     @classmethod
+     def from_paths(cls, name: str, in_dir: DirectoryPath, out_dir: DirectoryPath, settings: TfExtSettings) -> Self:
+         config_path = in_dir / name / f"{cls.CONFIG_FILENAME}"
+         assert config_path.exists(), f"{config_path} does not exist"
+         out_dir = out_dir or settings.modules_out_path
+         assert out_dir.exists(), f"{out_dir} does not exist"
+         config = parse_model(config_path, t=cls)
+         config.out_dir = out_dir / name
+         config.in_dir = in_dir / name
+         config.settings = settings
+         return config
+
+     def skip_variables_extra(self, resource_type: str) -> set[str]:
+         return next((r.skip_variables_extra for r in self.resources if r.name == resource_type), set())
+
+     def required_variables(self, resource_type: str) -> set[str]:
+         return next((r.required_variables for r in self.resources if r.name == resource_type), set())
+
+     def attribute_default_hcl_strings(self, resource_type: str) -> dict[str, str]:
+         return next((r.attribute_default_hcl_strings for r in self.resources if r.name == resource_type), {})
+
+     @property
+     def module_out_path(self) -> Path:
+         if out_dir := self.out_dir:
+             return out_dir
+         parent_path = self.settings.modules_out_path
+         return parent_path / self.name
+
+     @property
+     def example_plan_checks_path(self) -> Path:
+         assert self.in_dir, "in_dir is required to find example checks"
+         return self.in_dir / ModuleGenConfig.FILENAME_EXAMPLE_CHECKS
+
+     @property
+     def examples_test_path(self) -> Path:
+         assert self.in_dir, "in_dir is required to find examples test"
+         return self.in_dir / ModuleGenConfig.FILENAME_EXAMPLES_TEST
+
+     def dataclass_path(self, resource_type: str) -> Path:
+         # Must align with RepoOut.dataclass_path
+         if dataclass_out_dir := self.dataclass_out_dir:
+             return dataclass_out_dir / f"{resource_type}.py"
+         return self.module_out_path / f"{resource_type}.py"
+
+     def main_tf_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}.tf"
+         return self.module_out_path / "main.tf"
+
+     def variables_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}_variables.tf"
+         return self.module_out_path / "variables.tf"
+
+     def variablesx_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}_variablesx.tf"
+         return self.module_out_path / "variablesx.tf"
+
+     def output_path(self, resource_type: str) -> Path:
+         if len(self.resource_types) > 1:
+             return self.module_out_path / f"{resource_type}_output.tf"
+         return self.module_out_path / "output.tf"
+
+     def output_name(self, resource_type: str, *attr_name: str) -> str:
+         attr_single = "_".join(attr_name)
+         if len(self.resource_types) > 1:
+             return f"{resource_type}_{attr_single}"
+         return attr_single
+
+     def resolve_resource_type(self, path: Path) -> ResourceTypeT:
+         if len(self.resource_types) == 1:
+             return self.resource_types[0]
+         for resource_type in self.resource_types:
+             if path.name.startswith(resource_type):
+                 return resource_type
+         raise ValueError(f"Could not resolve resource type for path {path}")
+
+     def readme_path(self) -> Path:
+         return self.module_out_path / "README.md"
+
+     @property
+     def examples_path(self) -> Path:
+         return self.module_out_path / "examples"
+
+     def example_name(self, name: str, example_nr: int) -> str:
+         return f"{example_nr:02d}_{name}"
+
+     def example_path(self, name: str) -> Path:
+         return self.examples_path / name
+
+     def terraform_docs_config_path(self) -> Path:
+         return self.module_out_path / ".terraform-docs.yml"
+
+
+ @dataclass
+ class ResourceTypePythonModule:
+     resource_type: str
+     resource: type[ResourceAbs] | None = None
+     resource_ext: type[ResourceAbs] | None = None
+     module: ModuleType | None = None
+
+     @property
+     def dataclasses(self) -> dict[str, type]:
+         if not self.module:
+             return {}
+         return module_dataclasses(self.module)
+
+     @property
+     def resource_ext_cls_used(self) -> bool:
+         return self.resource_ext is not None
+
+     @property
+     def errors_func_used(self) -> bool:
+         return self.module is not None and getattr(self.module, "errors", None) is not None
+
+     @property
+     def modify_out_func_used(self) -> bool:
+         return self.module is not None and hasattr(self.module, "modify_out")
+
+     @property
+     def extra_post_init_lines(self) -> list[str]:
+         if self.resource_ext is None:
+             return []
+         return [make_post_init_line_from_field(extra_field) for extra_field in self.extra_fields]
+
+     @property
+     def base_fields(self) -> list[Field]:
+         if self.resource is None:
+             return []
+         return list(fields(self.resource))
+
+     @property
+     def base_field_names(self) -> list[str]:
+         return sorted(f.name for f in self.base_fields)
+
+     @property
+     def all_fields(self) -> list[Field]:
+         return self.base_fields + self.extra_fields
+
+     @property
+     def all_field_names(self) -> list[str]:
+         return sorted(f.name for f in self.all_fields)
+
+     @property
+     def base_field_names_computed(self) -> list[str]:
+         if self.resource is None:
+             return []
+         computed = getattr(self.resource, ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME, set())
+         return sorted(name for name in self.base_field_names if name in computed)
+
+     @property
+     def base_field_names_not_computed(self) -> list[str]:
+         computed = getattr(self.resource, ResourceAbs.COMPUTED_ONLY_ATTRIBUTES_NAME, set())
+         return sorted(name for name in self.base_field_names if name not in computed)
+
+     @property
+     def extra_fields(self) -> list[Field]:
+         if self.resource is None or self.resource_ext is None:
+             return []
+         base_fields = {f.name for f in self.base_fields}
+         return sorted(
+             (
+                 f
+                 for f in fields(self.resource_ext)
+                 if f.name not in base_fields and not ResourceAbs.skip_variable(f.name, self.resource_ext)
+             ),
+             key=lambda f: f.name,
+         )
+
+     @property
+     def extra_fields_names(self) -> list[str]:
+         return [f.name for f in self.extra_fields]
+
+     @property
+     def extra_import_lines(self) -> list[str]:
+         module = self.module
+         if not module:
+             return []
+         return [
+             as_import_line(as_name(value))
+             for key, value in vars(module).items()
+             if not key.startswith("_") and not as_name(value).startswith(("__", self.resource_type))
+         ]
+
+     @property
+     def all_skip_variables(self) -> set[str]:
+         skip_vars = set()
+         if self.resource:
+             skip_vars.update(getattr(self.resource, ResourceAbs.SKIP_VARIABLES_NAME, set()))
+         if self.resource_ext:
+             skip_vars.update(getattr(self.resource_ext, ResourceAbs.SKIP_VARIABLES_NAME, set()))
+         return skip_vars
+
+     @property
+     def nested_field_types(self) -> Iterable[tuple[str, ContainerType[ResourceAbs]]]:
+         cls = self.resource_ext or self.resource
+         if not cls:
+             return []
+         yield from self.container_types(cls)
+
+     @staticmethod
+     def container_types(data_class: type[ResourceAbs]) -> Iterable[tuple[str, ContainerType[ResourceAbs]]]:
+         for field in fields(data_class):
+             if ResourceAbs.is_nested(field.name, data_class):
+                 with suppress(PrimitiveTypeError):
+                     container_type = unwrap_type(field)
+                     yield field.name, container_type
+
+
+ class MissingDescriptionError(Exception):
+     def __init__(self, attribute_name: str, resource_type: ResourceTypeT):
+         super().__init__(f"Missing description for attribute {attribute_name} in resource type {resource_type}")
+         self.attribute_name = attribute_name
+         self.resource_type = resource_type
+
+
+ class AttributeDescriptions(Entity):
+     manual_nested: dict[ResourceTypeT, dict[str, str]] = PydanticField(default_factory=lambda: defaultdict(dict))
+     generated_nested: dict[ResourceTypeT, dict[str, str]] = PydanticField(default_factory=lambda: defaultdict(dict))
+     manual_flat: dict[str, str] = PydanticField(default_factory=dict)
+     generated_flat: dict[str, str] = PydanticField(default_factory=dict)
+
+     def resolve_description(self, attribute_name: str, resource_type: ResourceTypeT) -> str:
+         lookup_order = [
+             self.manual_nested.get(resource_type, {}),
+             self.generated_nested.get(resource_type, {}),
+             self.manual_flat,
+             self.generated_flat,
+         ]
+         try:
+             return next(desc for desc_dict in lookup_order if (desc := desc_dict.get(attribute_name)))
+         except StopIteration as e:
+             raise MissingDescriptionError(attribute_name, resource_type) from e
+
+
+ def parse_attribute_descriptions(settings: TfExtSettings) -> AttributeDescriptions:
+     return AttributeDescriptions(
+         manual_nested=parse_dict(settings.attribute_resource_descriptions_manual_file_path)
+         if settings.attribute_resource_descriptions_manual_file_path.exists()
+         else {},
+         generated_nested=parse_dict(settings.attribute_resource_descriptions_file_path)
+         if settings.attribute_resource_descriptions_file_path.exists()
+         else {},
+         manual_flat=parse_dict(settings.attribute_description_manual_file_path)
+         if settings.attribute_description_manual_file_path.exists()
+         else {},
+         generated_flat=parse_dict(settings.attribute_description_file_path)
+         if settings.attribute_description_file_path.exists()
+         else {},
+     )
+
+
+ def store_updated_attribute_description(
+     existing: AttributeDescriptions,
+     settings: TfExtSettings,
+     attribute_name: str,
+     description: str,
+     resource_type: ResourceTypeT = "",
+ ):
+     if resource_type:
+         out_path = settings.attribute_resource_descriptions_manual_file_path
+         existing.manual_nested.setdefault(resource_type, {})[attribute_name] = description
+         out_yaml = dump(existing.manual_nested, "yaml")
+     else:
+         out_path = settings.attribute_description_manual_file_path
+         existing.manual_flat[attribute_name] = description
+         out_yaml = dump(existing.manual_flat, "yaml")
+     ensure_parents_write_text(out_path, out_yaml)
+
+
+ def import_resource_type_python_module(resource_type: str, generated_dataclass_path: Path) -> ResourceTypePythonModule:
+     module = import_from_path(resource_type, generated_dataclass_path)
+     assert module
+     resource = getattr(module, "Resource")
+     assert resource
+     resource_ext = getattr(module, "ResourceExt", None)
+     return ResourceTypePythonModule(resource_type, resource, resource_ext, module)
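This module is the shared data model for the other new tf_ext generators in this release (gen_resource_main.py, gen_resource_variables.py, tf_mod_gen.py, and friends). As a rough orientation, a usage sketch follows; it is not part of the diff, the module name, resource type, and paths are hypothetical, and ModuleGenConfig.from_paths expects <in_dir>/<name>/config.yaml to already exist.

from pathlib import Path

from atlas_init.tf_ext.models_module import ModuleGenConfig, import_resource_type_python_module
from atlas_init.tf_ext.settings import TfExtSettings

# Hypothetical layout: module_inputs/my_module/config.yaml holds a ModuleGenConfig as YAML
# with a single resource entry named "mongodbatlas_project".
settings = TfExtSettings.from_env()
config = ModuleGenConfig.from_paths(
    name="my_module",
    in_dir=Path("module_inputs"),
    out_dir=Path("modules_out"),
    settings=settings,
)
print(config.main_tf_path("mongodbatlas_project"))  # modules_out/my_module/main.tf (single resource type)
print(config.variables_path("mongodbatlas_project"))  # modules_out/my_module/variables.tf

# Load a generated dataclass module; it is expected to define `Resource` (and optionally `ResourceExt`).
py_module = import_resource_type_python_module(
    "mongodbatlas_project", config.dataclass_path("mongodbatlas_project")
)
print(py_module.all_field_names, py_module.all_skip_variables)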
atlas_init/tf_ext/newres.py (new file)
@@ -0,0 +1,90 @@
+ import logging
+ from pathlib import Path
+ from ask_shell import run_and_wait
+ from ask_shell.settings import clean_dir
+ from model_lib import dump
+ from zero_3rdparty import humps
+ from zero_3rdparty.file_utils import ensure_parents_write_text
+ from atlas_init.tf_ext.provider_schema import AtlasSchemaInfo, parse_atlas_schema
+
+ logger = logging.getLogger(__name__)
+
+
+ def prepare_newres(path: Path):
+     if not path.exists():
+         path.parent.mkdir(exist_ok=True, parents=True)
+         run_and_wait(f"git clone https://github.com/lonegunmanb/newres.git {path.name}", cwd=path.parent)
+     schema = parse_atlas_schema()
+     modify_newres(path, schema)
+     run_and_wait("go fmt ./...", cwd=path)
+
+
+ def _template_resource_go(resource_type: str, resource_type_schema_json: str) -> str:
+     json_backticks_escaped = resource_type_schema_json.replace("`", '`+"`"+`')
+     return f"""
+ package custom
+ import (
+ "encoding/json"
+ tfjson "github.com/hashicorp/terraform-json"
+ )
+
+ const {humps.camelize(resource_type)} = `
+ {json_backticks_escaped}
+ `
+ func {humps.camelize(resource_type)}Schema() *tfjson.Schema {{
+ \tvar result tfjson.Schema
+ \t_ = json.Unmarshal([]byte({humps.camelize(resource_type)}), &result)
+ \treturn &result
+ }}
+
+ """
+
+
+ def _register_go(resources: list[str]) -> str:
+     resources_key_assignments = "\n".join(
+         f' Resources["{resource}"] = {humps.camelize(resource)}Schema()' for resource in resources
+     )
+     return f"""
+ package custom
+ import (
+ tfjson "github.com/hashicorp/terraform-json"
+ )
+ var Resources map[string]*tfjson.Schema
+
+ func init() {{
+ Resources = make(map[string]*tfjson.Schema)
+ {resources_key_assignments}
+ }}
+
+ """
+
+
+ def modify_newres(new_res_path: Path, schema: AtlasSchemaInfo):
+     custom_resource_dir = new_res_path / "pkg/custom"
+     clean_dir(custom_resource_dir)
+     for resource_type, resource_type_schema in schema.raw_resource_schema.items():
+         schema_json = dump(resource_type_schema, format="pretty_json")
+         resource_type_go = _template_resource_go(resource_type, schema_json)
+         resource_type_file = custom_resource_dir / f"{resource_type}.go"
+         ensure_parents_write_text(resource_type_file, resource_type_go)
+     register_go = _register_go(schema.resource_types)
+     register_file = custom_resource_dir / "register.go"
+     ensure_parents_write_text(register_file, register_go)
+     logger.info(f"Custom resource files written to {custom_resource_dir}")
+     add_to_register_go(new_res_path)
+
+
+ def add_to_register_go(new_res_path: Path):
+     register_go = new_res_path / "pkg/resource_register.go"
+     in_text = register_go.read_text()
+     replacements = {
+         "import (": 'import (\n\t"github.com/lonegunmanb/newres/v3/pkg/custom"',
+         "resources := []map[string]*tfjson.Schema{": "resources := []map[string]*tfjson.Schema{\n\t\tcustom.Resources,",
+     }
+     out_text = in_text
+     for old, new in replacements.items():
+         if new in out_text:
+             continue
+         out_text = out_text.replace(old, new)
+     ensure_parents_write_text(register_go, out_text)
+     logger.info(f"Added custom resources to {register_go}")
atlas_init/tf_ext/plan_diffs.py (new file)
@@ -0,0 +1,140 @@
+ from functools import total_ordering
+ from pathlib import Path
+ from tempfile import TemporaryDirectory
+ from typing import Any
+
+ from model_lib import Entity, dump, parse_model
+ from pydantic import Field, field_validator
+ from zero_3rdparty.file_utils import ensure_parents_write_text
+
+
+ PLAN_VARIABLES_FILENAME = "variables.tfvars.json"
+
+
+ class PlannedResource(Entity):
+     address: str
+     mode: str
+     type: str
+     name: str
+     provider_name: str
+     schema_version: int
+     values: dict[str, Any]
+     sensitive_values: dict[str, Any]
+
+
+ class VariableUsage(Entity):
+     value: Any
+
+
+ class OutputUsage(Entity):
+     resource: str  # address to resource
+     attribute: list[str]  # attribute name, only seen length 1 so far
+
+
+ def flatten_dict(d: dict[str, Any] | list[dict[str, Any]], current_address: str = "") -> dict[str, Any]:
+     response_dict = {}
+     if isinstance(d, list):
+         for item in d:
+             response_dict |= flatten_dict(item, current_address)
+         return response_dict
+     for key, value in d.items():
+         if key == "resources":
+             response_dict[current_address] = value
+             continue
+         if not isinstance(value, dict | list):
+             continue
+         response_dict |= flatten_dict(value, f"{current_address}.{key}".lstrip("."))
+     return response_dict
+
+
+ class PlanOutput(Entity):
+     planned_values: dict[str, list[PlannedResource]]
+     format_version: str  # of the plan
+     terraform_version: str  # used to generate the plan
+     variables: dict[str, VariableUsage]
+     configuration: dict[str, Any]
+     relevant_attributes: dict[str, OutputUsage] | list[OutputUsage] = Field(default_factory=list)
+
+     @field_validator("planned_values", mode="before")
+     def unpack_planned_values(cls, v: dict[str, Any]):
+         return flatten_dict(v)
+
+
+ def parse_plan_output(plan_json_path: Path) -> PlanOutput:
+     return parse_model(plan_json_path, t=PlanOutput)
+
+
+ def resource_type_name_filename(resource_type: str, resource_name: str) -> str:
+     return f"{resource_type}_{resource_name}.yaml"
+
+
+ def dump_plan_output_resources(output_dir: Path, plan_output: PlanOutput) -> list[Path]:
+     output_files: dict[str, Path] = {}
+     for resources in plan_output.planned_values.values():
+         for resource in resources:
+             resource_type_name = resource_type_name_filename(resource.type, resource.name)
+             output_file = output_dir / resource_type_name
+             assert resource_type_name not in output_files, f"Duplicate name {resource_type_name} in plan output"
+             output_files[resource_type_name] = output_file
+             ensure_parents_write_text(output_file, dump(resource.values, "yaml"))
+     return list(output_files.values())
+
+
+ def dump_plan_output_variables(output_dir: Path, plan_output: PlanOutput) -> Path:
+     variable_values = {name: value.value for name, value in plan_output.variables.items()}
+     output_file = output_dir / PLAN_VARIABLES_FILENAME
+     ensure_parents_write_text(output_file, dump(variable_values, "pretty_json"))
+     return output_file
+
+
+ def read_variables_path(module_path: Path) -> Path:
+     return module_path / PLAN_VARIABLES_FILENAME
+
+
+ class ResourceTypeName(Entity):
+     type: str
+     name: str
+
+
+ @total_ordering
+ class ResourceCheck(Entity):
+     actual: ResourceTypeName
+     expected_resource: ResourceTypeName
+
+     def __lt__(self, other) -> bool:
+         if not isinstance(other, ResourceCheck):
+             raise TypeError
+         return (self.actual.type, self.actual.name) < (other.actual.type, other.actual.name)
+
+     def __str__(self) -> str:
+         return f"Expecting Resource Match {self.expected_resource.type}.{self.expected_resource.name} == {self.actual.type}.{self.actual.name}"
+
+
+ class ExamplePlanCheck(Entity):
+     resource_checks: list[ResourceCheck] = Field(default_factory=list)
+     example_name: str
+     expected_output_dir_name: str
+
+
+ def generate_expected_actual(
+     stored_plan_outputs: Path, example_check: ExamplePlanCheck, plan_output: PlanOutput
+ ) -> tuple[str, str]:
+     expected_output_path = stored_plan_outputs / example_check.expected_output_dir_name
+     assert expected_output_path.exists(), f"Expected output directory {expected_output_path} does not exist"
+     expected_content, actual_content = [], []
+     with TemporaryDirectory() as temp_dir:
+         out_dir = Path(temp_dir)
+         dump_plan_output_resources(out_dir, plan_output)
+         for check in sorted(example_check.resource_checks):
+             check_header = str(check)
+             expected_file = expected_output_path / resource_type_name_filename(
+                 check.expected_resource.type, check.expected_resource.name
+             )
+             actual_file = out_dir / resource_type_name_filename(check.actual.type, check.actual.name)
+             if not expected_file.exists():
+                 raise ValueError(f"Expected file {expected_file} doesn't exist!")
+             if not actual_file.exists():
+                 raise ValueError(f"Actual file {actual_file} doesn't exist!")
+             expected_content.append(f"\n{check_header}\n{expected_file.read_text()}")
+             actual_content.append(f"\n{check_header}\n{actual_file.read_text()}")
+     return "\n".join(expected_content), "\n".join(actual_content)