atlas-init 0.6.0-py3-none-any.whl → 0.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atlas_init/__init__.py +1 -1
- atlas_init/atlas_init.yaml +1 -0
- atlas_init/cli_args.py +19 -1
- atlas_init/cli_tf/ci_tests.py +116 -24
- atlas_init/cli_tf/example_update.py +20 -8
- atlas_init/cli_tf/go_test_run.py +14 -2
- atlas_init/cli_tf/go_test_summary.py +334 -82
- atlas_init/cli_tf/go_test_tf_error.py +20 -12
- atlas_init/cli_tf/hcl/modifier.py +22 -8
- atlas_init/cli_tf/hcl/modifier2.py +120 -0
- atlas_init/cli_tf/openapi.py +10 -6
- atlas_init/html_out/__init__.py +0 -0
- atlas_init/html_out/md_export.py +143 -0
- atlas_init/sdk_ext/__init__.py +0 -0
- atlas_init/sdk_ext/go.py +102 -0
- atlas_init/sdk_ext/typer_app.py +18 -0
- atlas_init/settings/env_vars.py +25 -3
- atlas_init/settings/env_vars_generated.py +2 -0
- atlas_init/tf/.terraform.lock.hcl +33 -33
- atlas_init/tf/modules/aws_s3/provider.tf +1 -1
- atlas_init/tf/modules/aws_vpc/provider.tf +1 -1
- atlas_init/tf/modules/cloud_provider/provider.tf +1 -1
- atlas_init/tf/modules/cluster/provider.tf +1 -1
- atlas_init/tf/modules/encryption_at_rest/provider.tf +1 -1
- atlas_init/tf/modules/federated_vars/federated_vars.tf +1 -2
- atlas_init/tf/modules/federated_vars/provider.tf +1 -1
- atlas_init/tf/modules/project_extra/provider.tf +1 -1
- atlas_init/tf/modules/stream_instance/provider.tf +1 -1
- atlas_init/tf/modules/vpc_peering/provider.tf +1 -1
- atlas_init/tf/modules/vpc_privatelink/versions.tf +1 -1
- atlas_init/tf/providers.tf +1 -1
- atlas_init/tf_ext/__init__.py +0 -0
- atlas_init/tf_ext/__main__.py +3 -0
- atlas_init/tf_ext/api_call.py +325 -0
- atlas_init/tf_ext/args.py +32 -0
- atlas_init/tf_ext/constants.py +3 -0
- atlas_init/tf_ext/gen_examples.py +141 -0
- atlas_init/tf_ext/gen_module_readme.py +131 -0
- atlas_init/tf_ext/gen_resource_main.py +195 -0
- atlas_init/tf_ext/gen_resource_output.py +71 -0
- atlas_init/tf_ext/gen_resource_variables.py +159 -0
- atlas_init/tf_ext/gen_versions.py +10 -0
- atlas_init/tf_ext/models.py +106 -0
- atlas_init/tf_ext/models_module.py +454 -0
- atlas_init/tf_ext/newres.py +90 -0
- atlas_init/tf_ext/paths.py +126 -0
- atlas_init/tf_ext/plan_diffs.py +140 -0
- atlas_init/tf_ext/provider_schema.py +199 -0
- atlas_init/tf_ext/py_gen.py +294 -0
- atlas_init/tf_ext/schema_to_dataclass.py +522 -0
- atlas_init/tf_ext/settings.py +188 -0
- atlas_init/tf_ext/tf_dep.py +324 -0
- atlas_init/tf_ext/tf_desc_gen.py +53 -0
- atlas_init/tf_ext/tf_desc_update.py +0 -0
- atlas_init/tf_ext/tf_mod_gen.py +263 -0
- atlas_init/tf_ext/tf_mod_gen_provider.py +124 -0
- atlas_init/tf_ext/tf_modules.py +395 -0
- atlas_init/tf_ext/tf_vars.py +158 -0
- atlas_init/tf_ext/typer_app.py +28 -0
- {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/METADATA +5 -3
- {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/RECORD +64 -31
- atlas_init-0.8.0.dist-info/entry_points.txt +5 -0
- atlas_init-0.6.0.dist-info/entry_points.txt +0 -2
- {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/WHEEL +0 -0
- {atlas_init-0.6.0.dist-info → atlas_init-0.8.0.dist-info}/licenses/LICENSE +0 -0
atlas_init/tf_ext/gen_examples.py (new file)
@@ -0,0 +1,141 @@
+from contextlib import suppress
+from dataclasses import asdict
+from functools import singledispatch
+from pathlib import Path
+
+from zero_3rdparty import humps
+from zero_3rdparty.file_utils import clean_dir, ensure_parents_write_text
+
+from atlas_init.tf_ext.gen_resource_variables import generate_resource_variables
+from atlas_init.tf_ext.gen_versions import dump_versions_tf
+from atlas_init.tf_ext.models_module import ModuleGenConfig, ResourceAbs, ResourceGenConfig, ResourceTypePythonModule
+from atlas_init.tf_ext.py_gen import import_module_by_using_parents
+
+VARIABLE_PLACEHOLDER = "var."
+INDENT = " "
+
+
+def _examples_casted(examples: dict) -> dict[str, ResourceAbs]:
+    return examples
+
+
+def read_example_dirs(module_path: Path) -> list[Path]:
+    return sorted(
+        example_dir
+        for example_dir in (module_path / "examples").glob("*")
+        if example_dir.is_dir()
+        and len(example_dir.name) > 2
+        and example_dir.name[:2].isdigit()
+        and (example_dir / "main.tf").exists()
+    )
+
+
+def generate_module_examples(
+    config: ModuleGenConfig,
+    module: ResourceTypePythonModule,
+    resource_type: str,
+    *,
+    skip_clean_dir: bool = False,
+) -> list[Path]:
+    test_path = config.examples_test_path
+    imported_module = import_module_by_using_parents(test_path)
+    examples = getattr(imported_module, "EXAMPLES")
+    assert isinstance(examples, dict), f"{imported_module} does not have an EXAMPLES attribute"
+    examples_parsed = _examples_casted(examples)
+    examples_generated: list[Path] = []
+    for example_name, example in examples_parsed.items():
+        dumped_resource = {k: v for k, v in asdict(example).items() if v is not None}
+        variables = {
+            k: f"{v}{k}"
+            for k, v in dumped_resource.items()
+            if isinstance(v, str) and v.startswith(VARIABLE_PLACEHOLDER)
+        }
+        dumped_resource |= variables
+        variable_names = set(variables.keys())
+        ignored_names = set(module.all_field_names) - variable_names
+        ignored_names |= module.all_skip_variables
+        resource_cls = module.resource_ext or module.resource
+        assert resource_cls, f"{module} does not have a resource class"
+        example_path = config.example_path(example_name)
+        if not skip_clean_dir and example_path.exists():
+            clean_dir(example_path)
+
+        variables_tf = generate_resource_variables(
+            resource_cls,
+            ResourceGenConfig(
+                name=resource_type, skip_variables_extra=ignored_names, required_variables=variable_names
+            ),
+        )
+        ensure_parents_write_text(example_path / "variables.tf", variables_tf)
+        variables_str = "\n".join(f"{k} = {dump_variable(v)}" for k, v in dumped_resource.items() if can_dump(v))
+        example_main = example_main_tf(config, variables_str)
+        ensure_parents_write_text(example_path / "main.tf", example_main)
+        dump_versions_tf(example_path, skip_python=config.skip_python, minimal=True)
+        examples_generated.append(example_path)
+    return examples_generated
+
+
+class NotDumpableError(Exception):
+    def __init__(self, value: object) -> None:
+        super().__init__(f"Cannot dump variable {value!r}")
+
+
+def can_dump(variable: object) -> bool:
+    with suppress(NotDumpableError):
+        dump_variable(variable)
+        return True
+    return False
+
+
+@singledispatch
+def dump_variable(variable: object) -> str:
+    raise NotDumpableError(variable)
+
+
+@dump_variable.register
+def dump_variable_str(variable: str) -> str:
+    if "." in variable:
+        return variable
+    return f'"{variable}"'
+
+
+@dump_variable.register
+def dump_variable_int(variable: int) -> str:
+    return str(variable)
+
+
+@dump_variable.register
+def dump_variable_float(variable: float) -> str:
+    return str(variable)
+
+
+@dump_variable.register
+def dump_variable_bool(variable: bool) -> str:
+    return "true" if variable else "false"
+
+
+@dump_variable.register
+def dump_variable_list(variable: list) -> str:
+    return f"[\n{', '.join(f'{INDENT}{dump_variable(nested)}' for nested in variable if can_dump(nested))}\n]"
+
+
+@dump_variable.register
+def dump_variable_set(variable: set) -> str:
+    return f"[\n{', '.join(f'{INDENT}{dump_variable(nested)}' for nested in variable if can_dump(nested))}\n]"
+
+
+@dump_variable.register
+def dump_variable_dict(variable: dict) -> str:
+    return "{\n" + "\n".join(f"{INDENT}{k} = {dump_variable(v)}" for k, v in variable.items() if can_dump(v)) + "\n}"
+
+
+def example_main_tf(module: ModuleGenConfig, variables: str) -> str:
+    variables_indented = "\n".join(f"{INDENT}{var}" for var in variables.split("\n"))
+    module_name_snake = humps.dekebabize(module.name)
+    return f"""\
+module "{module_name_snake}" {{
+source = "../.."
+
+{variables_indented}
+}}
+"""
atlas_init/tf_ext/gen_module_readme.py (new file)
@@ -0,0 +1,131 @@
+import logging
+from enum import StrEnum
+
+from ask_shell import run_and_wait
+from zero_3rdparty.file_utils import ensure_parents_write_text, update_between_markers
+
+from atlas_init.tf_ext.gen_examples import read_example_dirs
+from atlas_init.tf_ext.models_module import ModuleGenConfig
+
+logger = logging.getLogger(__name__)
+_readme_disclaimer = """\
+## Disclaimer
+This Module is not meant for external consumption.
+It is part of a development PoC.
+Any usage problems will not be supported.
+However, if you have any ideas or feedback feel free to open a Github Issue!
+"""
+
+
+class ReadmeMarkers(StrEnum):
+    DISCLAIMER = "DISCLAIMER"
+    EXAMPLE = "TF_EXAMPLES"
+    TF_DOCS = "TF_DOCS"
+
+    @classmethod
+    def as_start(cls, marker_name: str) -> str:
+        return f"<!-- BEGIN_{marker_name} -->"
+
+    @classmethod
+    def as_end(cls, marker_name: str) -> str:
+        return f"<!-- END_{marker_name} -->"
+
+    @classmethod
+    def marker_lines(cls, marker_name: str) -> str:
+        return f"""\
+{cls.as_start(marker_name)}
+
+{cls.as_end(marker_name)}
+"""
+
+    @classmethod
+    def example_boilerplate(cls) -> str:
+        return "\n".join(cls.marker_lines(marker_name) for marker_name in list(cls))
+
+
+def read_examples(module: ModuleGenConfig) -> str:
+    example_dirs = read_example_dirs(module.module_out_path)
+    content = ["# Examples"]
+    for example_dir in example_dirs:
+        example_name = example_dir.name
+        header_name = example_name.replace("_", " ").replace("-", " ").title()
+        main_path = example_dir / "main.tf"
+        assert main_path.exists(), f"{main_path} does not exist, every example must have a main.tf"
+        content.extend(
+            [
+                f"## [{header_name}](./examples/{example_name})",
+                "",
+                "```terraform",
+                main_path.read_text(),
+                "```",
+                "",
+                "",
+            ]
+        )
+    return "\n".join(content)
+
+
+_static_terraform_config = """\
+formatter: markdown document
+output:
+  file: "FILENAME"
+  mode: inject
+  template: |-
+    START_MARKER
+    {{ .Content }}
+    END_MARKER
+sort:
+  enabled: true
+  by: required
+"""
+
+
+def terraform_docs_config_content(module: ModuleGenConfig) -> str:
+    config = _static_terraform_config
+    for replacement_in, replacement_out in [
+        ("FILENAME", module.readme_path().name),
+        ("START_MARKER", ReadmeMarkers.as_start(ReadmeMarkers.TF_DOCS)),
+        ("END_MARKER", ReadmeMarkers.as_end(ReadmeMarkers.TF_DOCS)),
+    ]:
+        config = config.replace(replacement_in, replacement_out)
+    return config
+
+
+def generate_readme(module: ModuleGenConfig) -> str:
+    readme_path = module.readme_path()
+    assert readme_path.exists(), (
+        f"{readme_path} does not exist, currently a boilerplate is expected, consider adding to {readme_path}\n{ReadmeMarkers.example_boilerplate()}"
+    )
+    update_between_markers(
+        readme_path,
+        _readme_disclaimer,
+        ReadmeMarkers.as_start(ReadmeMarkers.DISCLAIMER),
+        ReadmeMarkers.as_end(ReadmeMarkers.DISCLAIMER),
+    )
+    run_and_wait("terraform fmt -recursive .", cwd=module.module_out_path, allow_non_zero_exit=True, ansi_content=False)
+    example_section = read_examples(module)
+    update_between_markers(
+        readme_path,
+        example_section,
+        ReadmeMarkers.as_start(ReadmeMarkers.EXAMPLE),
+        ReadmeMarkers.as_end(ReadmeMarkers.EXAMPLE),
+    )
+    docs_config_path = module.terraform_docs_config_path()
+    if docs_config_path.exists():
+        logger.warning(f"{docs_config_path} already exists, skipping generation")
+    else:
+        config_content = terraform_docs_config_content(module)
+        ensure_parents_write_text(docs_config_path, config_content)
+        logger.info(f"generated {docs_config_path}")
+    run_and_wait(f"terraform-docs -c {docs_config_path} .", cwd=module.module_out_path)
+    readme_content = _default_link_updater(readme_path.read_text())
+    ensure_parents_write_text(readme_path, readme_content)
+    return readme_path.read_text()
+
+
+def _default_link_updater(readme_content: str) -> str:  # can be a global replacer for now
+    for replace_in, replace_out in {
+        "docs/resources/advanced_cluster": r"docs/resources/advanced_cluster%2520%2528preview%2520provider%25202.0.0%2529"
+    }.items():
+        readme_content = readme_content.replace(replace_in, replace_out)
+    return readme_content
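
A small sketch (illustrative only, not part of the diff) of the README boilerplate that generate_readme() expects to already exist; it simply prints the marker pairs defined by ReadmeMarkers above.

# Illustrative only: print the marker boilerplate that generate_readme() asserts is present.
from atlas_init.tf_ext.gen_module_readme import ReadmeMarkers

print(ReadmeMarkers.example_boilerplate())
# Expected to contain one pair per enum member, e.g.:
# <!-- BEGIN_DISCLAIMER --> ... <!-- END_DISCLAIMER -->
# <!-- BEGIN_TF_EXAMPLES --> ... <!-- END_TF_EXAMPLES -->
# <!-- BEGIN_TF_DOCS --> ... <!-- END_TF_DOCS -->
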
atlas_init/tf_ext/gen_resource_main.py (new file)
@@ -0,0 +1,195 @@
+import logging
+from dataclasses import fields
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import Iterable
+
+from ask_shell import run_and_wait
+
+from atlas_init.tf_ext.models_module import ModuleGenConfig, ResourceAbs, ResourceGenConfig
+from atlas_init.tf_ext.schema_to_dataclass import ResourceTypePythonModule
+
+logger = logging.getLogger(__name__)
+
+
+def local_name_varsx(resource_type: str) -> str:
+    return f"{resource_type}_varsx"
+
+
+def local_name_vars(resource_type: str) -> str:
+    return f"{resource_type}_vars"
+
+
+def locals_def(module: ResourceTypePythonModule) -> str:
+    base_defs = "\n".join(f" {name} = var.{name}" for name in module.base_field_names_not_computed)
+    if extras := module.extra_fields_names:
+        extra_defs = "\n".join(f" {name} = var.{name}" for name in extras)
+        base_def = f" {local_name_varsx(module.resource_type)} = {{\n{base_defs}\n }}"
+        extra_def = f"\n {local_name_vars(module.resource_type)} = {{\n{extra_defs}\n }}"
+    else:
+        base_def = f" {local_name_vars(module.resource_type)} = {{\n{base_defs}\n }}"
+        extra_def = ""
+    return f"""
+locals {{
+{base_def}{extra_def}
+}}
+"""
+
+
+def data_external(module: ResourceTypePythonModule, config: ModuleGenConfig) -> str:
+    input_json_parts = [
+        f"local.{local_name_vars(module.resource_type)}",
+    ]
+    if module.extra_fields_names:
+        input_json_parts.append(f"local.{local_name_varsx(module.resource_type)}")
+    if extras := config.inputs_json_hcl_extras:
+        input_json_parts.extend(extras)
+    inputs_json_merge = input_json_parts[0] if len(input_json_parts) == 1 else f"merge({', '.join(input_json_parts)})"
+    return f"""
+data "external" "{module.resource_type}" {{
+program = ["python3", "${{path.module}}/{module.resource_type}.py"]
+query = {{
+input_json = jsonencode({inputs_json_merge})
+}}
+}}
+"""
+
+
+def resource_declare_direct(py_module: ResourceTypePythonModule, config: ResourceGenConfig) -> str:
+    parent_cls = py_module.resource
+    resource_type = py_module.resource_type
+    assert parent_cls, f"{resource_type} does not have a resource"
+    field_base = f"var.{resource_type}." if config.use_single_variable else "var."
+    field_values = "\n".join(
+        _field_value(parent_cls, name, field_base) for name in py_module.base_field_names_not_computed
+    )
+
+    return f"""
+resource "{py_module.resource_type}" "this" {{
+{field_values}
+}}
+"""
+
+
+def _field_value(parent_cls: type[ResourceAbs], field_name: str, field_base: str = "var.") -> str:
+    if ResourceAbs.is_computed_only(field_name, parent_cls):
+        return ""
+    this_indent = " "
+    if ResourceAbs.is_block(field_name, parent_cls):
+        return "\n".join(f"{this_indent}{line}" for line in _handle_dynamic(parent_cls, field_name, field_base))
+    return this_indent + f"{field_name} = {field_base}{field_name}"
+
+
+def _handle_dynamic(
+    parent_cls: type[ResourceAbs], dynamic_field_name: str, existing_ref: str = "var."
+) -> Iterable[str]:
+    try:
+        container_type = next(
+            t for name, t in ResourceTypePythonModule.container_types(parent_cls) if name == dynamic_field_name
+        )
+    except StopIteration:
+        raise ValueError(f"Could not find container type for field {dynamic_field_name} in {parent_cls}")
+    hcl_ref = f"{dynamic_field_name}.value."
+    yield f'dynamic "{dynamic_field_name}" {{'
+    ref = existing_ref + dynamic_field_name
+    if container_type.is_list or container_type.is_set:
+        if container_type.is_optional:
+            yield f" for_each = {ref} == null ? [] : {ref}"
+        else:
+            yield f" for_each = {ref}"
+    elif container_type.is_dict:
+        raise NotImplementedError(f"Dict not supported for {dynamic_field_name} in {parent_cls}")
+    else:  # singular
+        if container_type.is_optional:
+            yield f" for_each = {ref} == null ? [] : [{ref}]"
+        else:
+            yield f" for_each = [{ref}]"
+    yield " content {"
+    yield from [f" {line}" for line in _nested_fields(container_type.type, hcl_ref)]
+    yield " }"
+    yield "}"
+
+
+def _nested_fields(cls: type[ResourceAbs], hcl_ref: str) -> Iterable[str]:
+    for field in fields(cls):
+        field_name = field.name
+        if ResourceAbs.is_computed_only(field_name, cls):
+            continue
+        if ResourceAbs.is_block(field_name, cls):
+            yield from _handle_dynamic(cls, field_name, hcl_ref)
+        else:
+            yield _field_value(cls, field_name, hcl_ref)
+
+
+def resource_declare(
+    resource_type: str, required_fields: set[str], nested_fields: set[str], field_names: list[str]
+) -> str:
+    def output_field(field_name: str) -> str:
+        return f"data.external.{resource_type}.result.{field_name}"
+
+    def as_output_field(field_name: str) -> str:
+        if field_name in nested_fields:
+            if field_name in required_fields:
+                return f"jsondecode({output_field(field_name)})"
+            return f'{output_field(field_name)} == "" ? null : jsondecode({output_field(field_name)})'
+        if field_name in required_fields:
+            return output_field(field_name)
+        return f'{output_field(field_name)} == "" ? null : {output_field(field_name)}'
+
+    required = [f" {field_name} = {as_output_field(field_name)}" for field_name in sorted(required_fields)]
+    non_required = [
+        f" {field_name} = {as_output_field(field_name)}"
+        for field_name in sorted(field_names)
+        if field_name not in required_fields
+    ]
+    return f"""
+resource "{resource_type}" "this" {{
+lifecycle {{
+precondition {{
+condition = length({output_field("error_message")}) == 0
+error_message = {output_field("error_message")}
+}}
+}}
+
+{"\n".join(required)}
+{"\n".join(non_required)}
+}}
+"""
+
+
+def format_tf_content(content: str) -> str:
+    with TemporaryDirectory() as tmp_dir:
+        tmp_file = Path(tmp_dir) / "content.tf"
+        tmp_file.write_text(content)
+        try:
+            run_and_wait("terraform fmt .", cwd=tmp_dir)
+        except Exception as e:
+            logger.error(f"Failed to format tf content:\n{content}")
+            raise e
+        return tmp_file.read_text()
+
+
+def generate_resource_main(python_module: ResourceTypePythonModule, config: ModuleGenConfig) -> str:
+    resource = python_module.resource_ext or python_module.resource
+    assert resource, f"{python_module} does not have a resource"
+    resource_hcl = (
+        resource_declare_direct(python_module, config.resource_config(python_module.resource_type))
+        if config.skip_python
+        else resource_declare(
+            resource_type=python_module.resource_type,
+            required_fields=resource.REQUIRED_ATTRIBUTES,
+            nested_fields=resource.NESTED_ATTRIBUTES,
+            field_names=python_module.base_field_names_not_computed,
+        )
+    )
+    return format_tf_content(
+        "\n".join(
+            [
+                *([] if config.skip_python else [locals_def(python_module)]),
+                *([] if config.skip_python else [data_external(python_module, config)]),
+                "",
+                resource_hcl,
+                "",
+            ]
+        )
+    )
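
A sketch (illustrative only, with a hypothetical resource type and attribute names) of the HCL that resource_declare() above emits, showing how required, optional, and nested attributes are read back from the external data source.

# Illustrative only: the resource type and attribute names below are hypothetical.
from atlas_init.tf_ext.gen_resource_main import resource_declare

hcl = resource_declare(
    resource_type="mongodbatlas_example",
    required_fields={"name"},
    nested_fields={"tags"},
    field_names=["name", "tags", "disk_size_gb"],
)
print(hcl)
# After terraform fmt (applied by generate_resource_main via format_tf_content), roughly:
#   resource "mongodbatlas_example" "this" {
#     lifecycle {
#       precondition {
#         condition     = length(data.external.mongodbatlas_example.result.error_message) == 0
#         error_message = data.external.mongodbatlas_example.result.error_message
#       }
#     }
#     name         = data.external.mongodbatlas_example.result.name
#     disk_size_gb = data.external.mongodbatlas_example.result.disk_size_gb == "" ? null : data.external.mongodbatlas_example.result.disk_size_gb
#     tags         = data.external.mongodbatlas_example.result.tags == "" ? null : jsondecode(data.external.mongodbatlas_example.result.tags)
#   }
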
atlas_init/tf_ext/gen_resource_output.py (new file)
@@ -0,0 +1,71 @@
+from dataclasses import fields
+from typing import Iterable
+from atlas_init.tf_ext.models_module import ContainerType, ResourceTypePythonModule, ModuleGenConfig, ResourceAbs
+
+
+def as_output(resource_type: str, field_name: str, output_name: str) -> str:
+    return _as_output(output_name, f"{resource_type}.this.{field_name}")
+
+
+def _as_output(name: str, value: str) -> str:
+    return f"""\
+output "{name}" {{
+value = {value}
+}}
+"""
+
+
+def as_nested_output(
+    resource_type: str,
+    parent_cls: type[ResourceAbs],
+    nested_types: dict[str, ContainerType[ResourceAbs]],
+    config: ModuleGenConfig,
+) -> Iterable[str]:
+    resource_id = f"{resource_type}.this"
+    for field_name, container_type in nested_types.items():
+        if container_type.is_any:
+            continue
+        computed_nested_fields = [
+            nested_field.name
+            for nested_field in fields(container_type.type)
+            if ResourceAbs.is_computed_only(nested_field.name, container_type.type)
+        ]
+        if container_type.is_list:
+            for computed_field_name in computed_nested_fields:
+                if container_type.is_optional and not ResourceAbs.is_required(field_name, parent_cls):
+                    yield _as_output(
+                        config.output_name(resource_type, field_name, computed_field_name),
+                        f"{resource_id}.{field_name} == null ? null : {resource_id}.{field_name}[*].{computed_field_name}",
+                    )
+                else:
+                    yield _as_output(
+                        config.output_name(resource_type, field_name, computed_field_name),
+                        f"{resource_id}.{field_name}[*].{computed_field_name}",
+                    )
+        elif container_type.is_set:
+            continue  # block type "limits" is represented by a set of objects, and set elements do not have addressable keys. To find elements matching specific criteria, use a "for" expression with an "if" clause.
+        elif container_type.is_dict or container_type.is_set:
+            raise NotImplementedError("Dict and set container types not supported yet")
+        else:
+            for computed_field_name in computed_nested_fields:
+                if container_type.is_optional and not ResourceAbs.is_required(field_name, parent_cls):
+                    yield _as_output(
+                        config.output_name(resource_type, field_name, computed_field_name),
+                        f"{resource_id}.{field_name} == null ? null : {resource_id}.{field_name}.{computed_field_name}",
+                    )
+                else:
+                    yield _as_output(
+                        config.output_name(resource_type, field_name, computed_field_name),
+                        f"{resource_id}.{field_name}.{computed_field_name}",
+                    )
+
+
+def generate_resource_output(py_module: ResourceTypePythonModule, config: ModuleGenConfig) -> str:
+    nested_types = dict(py_module.nested_field_types)
+    base_resource = py_module.resource
+    assert base_resource is not None, f"Resource {py_module.resource_type} has no base resource"
+    computed_field_names = [name for name in py_module.base_field_names_computed if name not in nested_types]
+    return "\n".join(
+        as_output(py_module.resource_type, field_name, config.output_name(py_module.resource_type, field_name))
+        for field_name in computed_field_names
+    ) + "\n".join(as_nested_output(py_module.resource_type, base_resource, nested_types, config))
|