atlas-init 0.7.0-py3-none-any.whl → 0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Files changed (33)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/atlas_init.yaml +1 -0
  3. atlas_init/cli_tf/example_update.py +20 -8
  4. atlas_init/cli_tf/hcl/modifier.py +22 -8
  5. atlas_init/settings/env_vars.py +12 -2
  6. atlas_init/tf_ext/api_call.py +9 -9
  7. atlas_init/tf_ext/args.py +16 -1
  8. atlas_init/tf_ext/gen_examples.py +141 -0
  9. atlas_init/tf_ext/gen_module_readme.py +131 -0
  10. atlas_init/tf_ext/gen_resource_main.py +195 -0
  11. atlas_init/tf_ext/gen_resource_output.py +71 -0
  12. atlas_init/tf_ext/gen_resource_variables.py +159 -0
  13. atlas_init/tf_ext/gen_versions.py +10 -0
  14. atlas_init/tf_ext/models_module.py +454 -0
  15. atlas_init/tf_ext/newres.py +90 -0
  16. atlas_init/tf_ext/plan_diffs.py +140 -0
  17. atlas_init/tf_ext/provider_schema.py +199 -0
  18. atlas_init/tf_ext/py_gen.py +294 -0
  19. atlas_init/tf_ext/schema_to_dataclass.py +522 -0
  20. atlas_init/tf_ext/settings.py +151 -2
  21. atlas_init/tf_ext/tf_dep.py +5 -5
  22. atlas_init/tf_ext/tf_desc_gen.py +53 -0
  23. atlas_init/tf_ext/tf_desc_update.py +0 -0
  24. atlas_init/tf_ext/tf_mod_gen.py +263 -0
  25. atlas_init/tf_ext/tf_mod_gen_provider.py +124 -0
  26. atlas_init/tf_ext/tf_modules.py +5 -4
  27. atlas_init/tf_ext/tf_vars.py +13 -28
  28. atlas_init/tf_ext/typer_app.py +6 -2
  29. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/METADATA +4 -3
  30. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/RECORD +33 -17
  31. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/WHEEL +0 -0
  32. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/entry_points.txt +0 -0
  33. {atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/licenses/LICENSE +0 -0
atlas_init/tf_ext/tf_dep.py
@@ -7,8 +7,8 @@ from typing import Iterable, NamedTuple
 
 import pydot
 from ask_shell import ShellError, new_task, run_and_wait
-from ask_shell.run_pool import run_pool
 from ask_shell._run import stop_runs_and_pool
+from ask_shell.run_pool import run_pool
 from model_lib import Entity, dump
 from pydantic import BaseModel, Field
 from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
@@ -17,10 +17,10 @@ from zero_3rdparty.file_utils import ensure_parents_write_text
 from zero_3rdparty.iter_utils import flat_map
 
 from atlas_init.settings.rich_utils import configure_logging
-from atlas_init.tf_ext.args import REPO_PATH_ARG, SKIP_EXAMPLES_DIRS_OPTION
+from atlas_init.tf_ext.args import REPO_PATH_ATLAS_ARG, SKIP_EXAMPLES_DIRS_OPTION
 from atlas_init.tf_ext.constants import ATLAS_PROVIDER_NAME
 from atlas_init.tf_ext.paths import find_variable_resource_type_usages, find_variables, get_example_directories
-from atlas_init.tf_ext.settings import TfDepSettings
+from atlas_init.tf_ext.settings import TfExtSettings
 
 logger = logging.getLogger(__name__)
 v2_grand_parent_dirs = {
@@ -48,10 +48,10 @@ def is_v2_example_dir(example_dir: Path) -> bool:
 
 
 def tf_dep_graph(
-    repo_path: Path = REPO_PATH_ARG,
+    repo_path: Path = REPO_PATH_ATLAS_ARG,
     skip_names: list[str] = SKIP_EXAMPLES_DIRS_OPTION,
 ):
-    settings = TfDepSettings.from_env()
+    settings = TfExtSettings.from_env()
     output_dir = settings.static_root
     logger.info(f"Using output directory: {output_dir}")
     example_dirs = get_example_directories(repo_path, skip_names)
atlas_init/tf_ext/tf_desc_gen.py
@@ -0,0 +1,53 @@
+import logging
+from collections import defaultdict
+from atlas_init.tf_ext.args import TF_CLI_CONFIG_FILE_ARG
+from atlas_init.tf_ext.settings import TfExtSettings
+from atlas_init.tf_ext.provider_schema import ResourceSchema, parse_atlas_schema
+from model_lib import dump, parse_model
+from zero_3rdparty.file_utils import ensure_parents_write_text
+
+logger = logging.getLogger(__name__)
+
+
+def tf_desc_gen(
+    tf_cli_config_file: str = TF_CLI_CONFIG_FILE_ARG,
+):
+    settings = TfExtSettings.from_env()
+    out_path = settings.attribute_description_file_path
+    resource_out_path = settings.attribute_resource_descriptions_file_path
+    assert tf_cli_config_file
+    schema = parse_atlas_schema()
+    descriptions = {}
+    descriptions_by_resource: dict[str, dict[str, str]] = defaultdict(dict)
+    attr_desc_resource = {}
+
+    def add_description(resource_type: str, attr_name: str, description: str | None) -> None:
+        if not description:
+            return
+        descriptions_by_resource[resource_type][attr_name] = description
+        if existing := descriptions.get(attr_name):
+            if existing != description:
+                old_resource_type = attr_desc_resource[attr_name]
+                logger.info(
+                    f"Descriptions differs between '{old_resource_type}' and '{resource_type}' for attribute '{attr_name}':\n{existing}\n{description}"
+                )
+                if len(existing) > len(description):
+                    return
+        descriptions[attr_name] = description
+        attr_desc_resource[attr_name] = resource_type
+
+    for resource_type, resource_schema in schema.raw_resource_schema.items():
+        parsed_schema = parse_model(resource_schema, t=ResourceSchema)
+        schema_block = parsed_schema.block
+        for name, attribute in (schema_block.attributes or {}).items():
+            add_description(resource_type, name, attribute.description)
+        for name, block_type in (schema_block.block_types or {}).items():
+            add_description(resource_type, name, block_type.description)
+    descriptions_yaml = dump(dict(sorted(descriptions.items())), format="yaml")
+    ensure_parents_write_text(out_path, descriptions_yaml)
+    logger.info(f"Generated attribute descriptions to {out_path}")
+    resource_descriptions_yaml = dump(
+        {k: dict(sorted(v.items())) for k, v in sorted(descriptions_by_resource.items())}, format="yaml"
+    )
+    ensure_parents_write_text(resource_out_path, resource_descriptions_yaml)
+    logger.info(f"Generated attribute resource descriptions to {resource_out_path}")
atlas_init/tf_ext/tf_desc_update.py (file without changes)
atlas_init/tf_ext/tf_mod_gen.py
@@ -0,0 +1,263 @@
+import difflib
+import logging
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+import typer
+from ask_shell import new_task, run_and_wait, run_pool, text
+from model_lib import parse_model, parse_payload
+from pydantic import DirectoryPath, TypeAdapter
+from zero_3rdparty.file_utils import clean_dir, copy, ensure_parents_write_text
+
+from atlas_init.cli_tf.example_update import UpdateExamples, update_examples
+from atlas_init.tf_ext.args import TF_CLI_CONFIG_FILE_ARG
+from atlas_init.tf_ext.gen_examples import generate_module_examples, read_example_dirs
+from atlas_init.tf_ext.gen_module_readme import generate_readme
+from atlas_init.tf_ext.gen_resource_main import generate_resource_main
+from atlas_init.tf_ext.gen_resource_output import generate_resource_output
+from atlas_init.tf_ext.gen_resource_variables import generate_module_variables
+from atlas_init.tf_ext.gen_versions import dump_versions_tf
+from atlas_init.tf_ext.models_module import (
+    MissingDescriptionError,
+    ModuleGenConfig,
+    import_resource_type_python_module,
+    parse_attribute_descriptions,
+    store_updated_attribute_description,
+)
+from atlas_init.tf_ext.newres import prepare_newres
+from atlas_init.tf_ext.plan_diffs import (
+    ExamplePlanCheck,
+    generate_expected_actual,
+    parse_plan_output,
+    read_variables_path,
+)
+from atlas_init.tf_ext.provider_schema import AtlasSchemaInfo, ResourceSchema, parse_atlas_schema
+from atlas_init.tf_ext.schema_to_dataclass import convert_and_format
+from atlas_init.tf_ext.settings import TfExtSettings
+
+logger = logging.getLogger(__name__)
+
+
+def tf_mod_gen(
+    tf_cli_config_file: str = TF_CLI_CONFIG_FILE_ARG,
+    use_newres: bool = typer.Option(False, "--use-newres", help="Use newres to generate modules"),
+    resource_type: list[str] = typer.Option(
+        ..., "-r", "--resource-type", help="Resource types to generate modules for", default_factory=list
+    ),
+    name: str = typer.Option("", "-n", "--name", help="Name of the module"),
+    in_dir: DirectoryPath = typer.Option(
+        ..., "-i", "--in-dir", help="Parent directory where the module generation files are stored"
+    ),
+    out_dir: DirectoryPath = typer.Option(
+        ...,
+        "-o",
+        "--out-dir",
+        help="Output directory for generated modules, the module will end up in {output_dir}/{name}",
+    ),
+    example_var_file: Path = typer.Option(
+        ..., "-e", "--example-var-file", help="Path to example variable file", envvar="TF_EXT_EXAMPLE_VAR_FILE"
+    ),
+):
+    settings = TfExtSettings.from_env()
+    assert tf_cli_config_file, "tf_cli_config_file is required"
+    if use_newres:
+        prepare_newres(settings.new_res_path)
+    else:
+        settings = TfExtSettings.from_env()
+        logger.info("will use Python generation")
+    config = ModuleGenConfig.from_paths(name, in_dir, out_dir, settings)
+    prepare_out_dir(config)
+    generate_module(config)
+    module_examples_and_readme(config, example_var_file=example_var_file)
+
+
+def prepare_out_dir(config: ModuleGenConfig, *, skip_clean_dir: bool = False):
+    if not skip_clean_dir:
+        clean_dir(config.module_out_path)
+    in_dir = config.in_dir
+    assert in_dir, "in_dir is required"
+    for src_file in in_dir.glob("*"):
+        if config.skip_copy(src_file):
+            continue
+        copy(src_file, config.module_out_path / src_file.name, clean_dest=True)  # also copies directories
+    example_checks = config.example_plan_checks_path
+    if example_checks.exists():
+        example_plan_checks_raw = parse_payload(example_checks)
+        config.example_plan_checks = TypeAdapter(list[ExamplePlanCheck]).validate_python(example_plan_checks_raw)
+
+
+def generate_module(config: ModuleGenConfig) -> Path:
+    with new_task("Reading Atlas Schema"):
+        schema = parse_atlas_schema()
+    assert schema
+    resource_types = config.resource_types
+    with new_task("Generating module files for resource types", total=len(resource_types)) as task:
+        for resource_type in resource_types:
+            generate_resource_module(config, resource_type, schema)
+            task.update(advance=1)
+
+    return finalize_and_validate_module(config)
+
+
+def generate_resource_module(config: ModuleGenConfig, resource_type: str, atlas_schema: AtlasSchemaInfo) -> None:
+    resource_type_schema = atlas_schema.raw_resource_schema.get(resource_type)
+    assert resource_type_schema, f"resource type {resource_type} not found in schema"
+    schema_parsed = parse_model(resource_type_schema, t=ResourceSchema)
+    dataclass_path = config.dataclass_path(resource_type)
+    dataclass_code = convert_and_format(resource_type, schema_parsed, config, existing_path=dataclass_path)
+    logger.info(f"Generated dataclass for {resource_type} to {dataclass_path}")
+    ensure_parents_write_text(dataclass_path, dataclass_code)
+
+    python_module = import_resource_type_python_module(resource_type, dataclass_path)
+    main_tf = generate_resource_main(python_module, config)
+    main_path = config.main_tf_path(resource_type)
+    ensure_parents_write_text(main_path, main_tf)
+
+    variablesx_tf, variables_tf = generate_module_variables(python_module, config.resource_config(resource_type))
+    variables_path = config.variables_path(resource_type)
+    if variablesx_tf and variables_tf:
+        variablesx_path = config.variablesx_path(resource_type)
+        ensure_parents_write_text(variablesx_path, variablesx_tf)
+        ensure_parents_write_text(variables_path, variables_tf)
+    else:
+        ensure_parents_write_text(variables_path, variablesx_tf)
+    if output_tf := generate_resource_output(python_module, config):
+        output_path = config.output_path(resource_type)
+        ensure_parents_write_text(output_path, output_tf)
+    if config.skip_python and dataclass_path.is_relative_to(config.module_out_path):
+        dataclass_path.unlink(missing_ok=True)
+
+
+def finalize_and_validate_module(config: ModuleGenConfig) -> Path:
+    dump_versions_tf(config.module_out_path, skip_python=config.skip_python)
+    logger.info(f"Module dumped to {config.module_out_path}, running checks")
+    validate_module(config.module_out_path, tf_cli_config_file=config.settings.tf_cli_config_file)
+    return config.module_out_path
+
+
+OUT_BINARY_PATH = "tfplan.binary"
+
+
+def validate_module(tf_workdir: Path, *, tf_cli_config_file: Path | None = None):
+    terraform_commands = [
+        "terraform init",
+        "terraform fmt .",
+        "terraform validate .",
+    ]
+    env_extra = {}
+    if tf_cli_config_file:
+        env_extra["TF_CLI_CONFIG_FILE"] = str(tf_cli_config_file)
+    with new_task("Terraform Module Validate Checks", total=len(terraform_commands)) as task:
+        for command in terraform_commands:
+            attempts = 3 if command == "terraform init" else 1  # terraform init can fail due to network issues
+            run_and_wait(command, cwd=tf_workdir, env=env_extra, attempts=attempts)
+            task.update(advance=1)
+
+
+def module_examples_and_readme(config: ModuleGenConfig, *, example_var_file: Path | None = None) -> Path:
+    path = config.module_out_path
+    if (examples_test := config.examples_test_path) and examples_test.exists():
+        with new_task(f"Generating examples from {config.FILENAME_EXAMPLES_TEST}"):
+            assert len(config.resource_types) == 1
+            resource_type = config.resource_types[0]
+            py_module = import_resource_type_python_module(resource_type, config.dataclass_path(resource_type))
+            examples_generated = generate_module_examples(config, py_module, resource_type=resource_type)
+        if examples_generated:
+            with run_pool("Validating examples", total=len(examples_generated), exit_wait_timeout=60) as pool:
+                for example_path in examples_generated:
+                    pool.submit(validate_module, example_path)
+
+    attribute_descriptions = parse_attribute_descriptions(config.settings)
+    settings = config.settings
+
+    def new_description(name: str, old_description: str, path: Path) -> str:
+        resource_type = config.resolve_resource_type(path)
+        try:
+            return attribute_descriptions.resolve_description(name, resource_type)
+        except MissingDescriptionError:
+            if new_text := text(
+                f"Enter description for variable/output {name} in {resource_type} for {path} (empty to skip)",
+                default="",
+            ):
+                store_updated_attribute_description(attribute_descriptions, settings, name, new_text, resource_type)
+            return new_text
+
+    out_event = update_examples(
+        UpdateExamples(
+            examples_base_dir=path,
+            skip_tf_fmt=True,
+            new_description_call=new_description,
+        )
+    )
+    if out_event.changes:
+        logger.info(f"Updated attribute descriptions: {len(out_event.changes)}")
+    run_and_wait("terraform fmt -recursive .", cwd=path, ansi_content=False, allow_non_zero_exit=True)
+    if readme_path := config.readme_path():
+        with new_task("Generating README.md"):
+            readme_content = generate_readme(config)
+            ensure_parents_write_text(readme_path, readme_content)
+    if example_var_file:
+        examples = read_example_dirs(config.module_out_path)
+        if examples:
+            failed_examples: list[Path] = []
+            with run_pool("Running terraform plan on examples", total=len(examples), exit_wait_timeout=60) as pool:
+
+                def run_example(example: Path):
+                    try:
+                        run_and_wait(f"terraform plan -var-file={example_var_file}", cwd=example)
+                    except Exception as e:
+                        logger.error(f"Failed to run terraform plan on {example.name}: {e}")
+                        failed_examples.append(example)
+
+                for example in examples:
+                    pool.submit(run_example, example)
+            if failed_examples:
+                failed_str = ", ".join(sorted(example.name for example in failed_examples))
+                logger.error(f"Failed to run terraform plan on {failed_str} examples")
+    return path
+
+
+def example_plan_checks(config: ModuleGenConfig, timeout_all_seconds: int = 60) -> list[Path]:
+    example_checks = config.example_plan_checks
+    settings = config.settings
+
+    def run_check(check: ExamplePlanCheck):
+        expected_dir = settings.output_plan_dumps / check.expected_output_dir_name
+        variables_path = read_variables_path(expected_dir)
+        with TemporaryDirectory() as temp_dir:
+            stored_plan = Path(temp_dir) / "plan.json"
+            tf_dir = config.example_path(check.example_name)
+            validate_module(tf_dir)
+            var_arg = f" -var-file={variables_path}" if variables_path else ""
+            run_and_wait(f"terraform plan -out={OUT_BINARY_PATH}{var_arg}", cwd=tf_dir)
+            run_and_wait(f"terraform show -json {OUT_BINARY_PATH} > {stored_plan}", cwd=tf_dir)
+            plan_output = parse_plan_output(stored_plan)
+            return generate_expected_actual(settings.output_plan_dumps, check, plan_output)
+
+    with run_pool("Run Examples", total=len(example_checks), exit_wait_timeout=timeout_all_seconds) as pool:
+        futures = {pool.submit(run_check, check): check for check in example_checks}
+    diff_paths: list[Path] = []
+    for future in futures:
+        check = futures[future]
+        try:
+            expected, actual = future.result(timeout=timeout_all_seconds)
+            if expected != actual:
+                diff_path = settings.plan_diff_output_path / f"{config.name}_{check.example_name}.html"
+                dump_html_diff(expected, actual, diff_path)
+                diff_paths.append(diff_path)
+                logger.error(f"Example check failed for {check}")
+        except Exception as e:
+            logger.error(f"Example check failed to run terraform commands for {check}: {e}")
+            raise e
+    return diff_paths
+
+
+def dump_html_diff(expected: str, actual: str, diff_path: Path) -> str:
+    html_text = difflib.HtmlDiff().make_file(
+        expected.splitlines(),
+        actual.splitlines(),
+        "expected",
+        "actual",
+    )
+    ensure_parents_write_text(diff_path, html_text)
+    return html_text
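Note: `example_plan_checks` above compares a stored (expected) plan dump with a freshly generated one and, on mismatch, writes an HTML report through the standard-library `difflib.HtmlDiff`, as in `dump_html_diff`. A stand-alone sketch of that reporting step (the sample strings and output file name are illustrative):

```python
# Sketch of the stdlib pattern used by dump_html_diff: render two text blobs
# as a side-by-side HTML diff and write it to disk for inspection.
import difflib
from pathlib import Path

expected = "resource_count = 3\nregion = EU_WEST_1\n"
actual = "resource_count = 3\nregion = US_EAST_1\n"

html_text = difflib.HtmlDiff().make_file(
    expected.splitlines(),
    actual.splitlines(),
    "expected",
    "actual",
)
Path("plan_diff_example.html").write_text(html_text)  # open in a browser to review
```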
atlas_init/tf_ext/tf_mod_gen_provider.py
@@ -0,0 +1,124 @@
+import logging
+from pathlib import Path
+from ask_shell import confirm, run_pool
+from concurrent.futures import Future
+from ask_shell.rich_live import get_live_console
+from model_lib import copy_and_validate, parse_model
+from rich.markdown import Markdown
+import typer
+from zero_3rdparty.file_utils import clean_dir
+
+from atlas_init.tf_ext.models_module import (
+    ModuleGenConfig,
+    ProviderGenConfig,
+    as_provider_name,
+)
+from atlas_init.tf_ext.provider_schema import parse_atlas_schema_from_settings
+from atlas_init.tf_ext.settings import init_tf_ext_settings
+from atlas_init.tf_ext.tf_mod_gen import finalize_and_validate_module, generate_resource_module
+
+logger = logging.getLogger(__name__)
+ATLAS_PROVIDER_PATH = "mongodb/mongodbatlas"
+
+
+def tf_mod_gen_provider_resource_modules(
+    provider_path: str = typer.Option(
+        ATLAS_PROVIDER_PATH, "--provider-path", help="Provider path name, {owner}/{repo} from terraform registry"
+    ),
+    include_only: list[str] = typer.Option(
+        ..., "-i", "--include-only", help="Only include these resource types", default_factory=list
+    ),
+):
+    settings = init_tf_ext_settings()
+    if provider_path != ATLAS_PROVIDER_PATH:
+        raise NotImplementedError(f"provider_name must be {ATLAS_PROVIDER_PATH}")
+    provider_name = as_provider_name(provider_path)
+    repo_out = settings.repo_out
+    provider_config_path = repo_out.provider_settings_path(provider_name)
+    provider_config = parse_model(provider_config_path, t=ProviderGenConfig)
+
+    atlas_schema = parse_atlas_schema_from_settings(settings, provider_config)
+    include_only_set = set(include_only)
+    deprecated_types = set(atlas_schema.deprecated_resource_types)
+
+    def include_resource_type(resource_type: str) -> bool:
+        return (not include_only or resource_type in include_only_set) and resource_type not in deprecated_types
+
+    resource_types = [
+        resource_type for resource_type in atlas_schema.resource_types if include_resource_type(resource_type)
+    ]
+    if not resource_types:
+        raise ValueError(f"No resource types to generate for provider {provider_name} after filtering")
+
+    def generate_module(module_config: ModuleGenConfig) -> tuple[Path, Path]:
+        resource = module_config.resources[0]
+        generate_resource_module(module_config, resource.name, atlas_schema)
+        module_path = finalize_and_validate_module(module_config)
+
+        config_single = copy_and_validate(
+            module_config,
+            resources=[resource.single_variable_version()],
+            out_dir=module_path.with_name(module_path.stem + "_single"),
+        )
+        generate_resource_module(config_single, resource.name, atlas_schema)
+        module_path_single = finalize_and_validate_module(config_single)
+        return module_path, module_path_single
+
+    with run_pool(
+        "Generating module files for resource types", total=len(resource_types), exit_wait_timeout=60
+    ) as pool:
+        futures: dict[str, Future] = {}
+        for resource_type in resource_types:
+            module_config = ModuleGenConfig.from_repo_out(resource_type, provider_config, repo_out)
+            module_config.skip_python = True
+            futures[resource_type] = pool.submit(generate_module, module_config)
+    summary = ["## Generated Resource Modules"]
+    failures = []
+    generated_module_paths: set[Path] = set()
+    generated_py_files: set[Path] = set()
+    for resource_type, future in futures.items():
+        try:
+            module_path, module_path_single = future.result()
+            logger.info(f"Generated module for resource type = {resource_type} at {module_path} & {module_path_single}")
+            summary.append(f"- {resource_type} -> {module_path}")
+            generated_module_paths.add(module_path)
+            generated_module_paths.add(module_path_single)
+            generated_py_files.add(repo_out.dataclass_path(provider_name, resource_type))
+        except Exception:
+            failures.append(resource_type)
+            logger.exception(f"failed to generate module for resource type = {resource_type}")
+            continue
+    if failures:
+        summary.append("## Failed Resource Modules")
+        for resource_type in failures:
+            summary.append(f"- {resource_type}")
+    get_live_console().print(Markdown("\n".join(summary)))
+    if generated_module_paths:
+        logger.info(f"Generated a total of: {len(generated_module_paths)} modules")
+    if not include_only:
+        clean_extra_modules(repo_out.resource_modules_provider_path(provider_name), generated_module_paths)
+        clean_extra_py_modules(repo_out.py_provider_module(provider_name), generated_py_files)
+
+
+def clean_extra_modules(resource_modules_out_dir: Path, generated_module_paths: set[Path]):
+    if extra_paths := [
+        path for path in resource_modules_out_dir.glob("*") if path.is_dir() and path not in generated_module_paths
+    ]:
+        logger.warning(f"Found extra paths in {resource_modules_out_dir}: {extra_paths}")
+        extra_paths_str = "\n".join(path.name for path in extra_paths)
+        if confirm(f"Can delete extra paths in {resource_modules_out_dir}:\n{extra_paths_str}"):
+            for path in extra_paths:
+                clean_dir(path, recreate=False)
+
+
+def clean_extra_py_modules(py_modules_out_dir: Path, generated_py_files: set[Path]):
+    if extra_paths := [
+        path
+        for path in py_modules_out_dir.glob("*.py")
+        if path.is_file() and not path.name.startswith("_") and path not in generated_py_files
+    ]:
+        logger.warning(f"Found extra paths in {py_modules_out_dir}: {extra_paths}")
+        extra_paths_str = "\n".join(path.name for path in extra_paths)
+        if confirm(f"Can delete extra paths in {py_modules_out_dir}:\n{extra_paths_str}"):
+            for path in extra_paths:
+                path.unlink()
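Note: the provider-wide command fans out one module-generation job per resource type and keeps collecting results even when individual resource types fail. A rough equivalent of that submit-then-collect pattern using only the standard library (`ThreadPoolExecutor` stands in for ask_shell's `run_pool`, and `generate()` is a placeholder):

```python
# Sketch only: submit one job per resource type, then gather results and record
# failures without aborting the whole run (mirrors the futures loop above).
from concurrent.futures import Future, ThreadPoolExecutor


def generate(resource_type: str) -> str:
    if resource_type.endswith("_deprecated"):
        raise ValueError(f"cannot generate {resource_type}")
    return f"modules/{resource_type}"


resource_types = ["mongodbatlas_project", "mongodbatlas_cluster", "example_deprecated"]
with ThreadPoolExecutor() as pool:
    futures: dict[str, Future] = {rt: pool.submit(generate, rt) for rt in resource_types}

failures: list[str] = []
for resource_type, future in futures.items():
    try:
        print(f"- {resource_type} -> {future.result()}")
    except Exception:
        failures.append(resource_type)  # keep going; report failures at the end
print(f"failed: {failures}")
```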
atlas_init/tf_ext/tf_modules.py
@@ -1,4 +1,5 @@
 from __future__ import annotations
+
 import logging
 from dataclasses import dataclass
 from pathlib import Path
@@ -13,7 +14,7 @@ from zero_3rdparty.iter_utils import flat_map
 
 from atlas_init.tf_ext.constants import ATLAS_PROVIDER_NAME
 from atlas_init.tf_ext.models import ModuleConfig, ModuleConfigs
-from atlas_init.tf_ext.settings import TfDepSettings
+from atlas_init.tf_ext.settings import TfExtSettings
 from atlas_init.tf_ext.tf_dep import FORCE_INTERNAL_NODES, SKIP_NODES, AtlasGraph, edge_src_dest
 
 logger = logging.getLogger(__name__)
@@ -142,7 +143,7 @@ def tf_modules(
         show_default=True,
     ),
 ):
-    settings = TfDepSettings.from_env()
+    settings = TfExtSettings.from_env()
     atlas_graph = parse_atlas_graph(settings)
     output_dir = settings.static_root
     with new_task("Write graphs"):
@@ -229,7 +230,7 @@ def generate_module_graphs(skipped_module_resource_types, settings, atlas_graph)
     return modules
 
 
-def parse_atlas_graph(settings: TfDepSettings) -> AtlasGraph:
+def parse_atlas_graph(settings: TfExtSettings) -> AtlasGraph:
     atlas_graph = parse_model(settings.atlas_graph_path, t=AtlasGraph)
     deprecated_resources = parse_list(settings.schema_resource_types_deprecated_path, format="yaml")
     atlas_graph.deprecated_resource_types.update(deprecated_resources)
@@ -240,7 +241,7 @@ def parse_atlas_graph(settings: TfDepSettings) -> AtlasGraph:
 
 
 def add_unused_nodes_to_graph(
-    settings: TfDepSettings, atlas_graph: AtlasGraph, color_coder: ColorCoder, internal_graph: pydot.Dot
+    settings: TfExtSettings, atlas_graph: AtlasGraph, color_coder: ColorCoder, internal_graph: pydot.Dot
 ):
     schema_resource_types: list[str] = parse_list(settings.schema_resource_types_path, format="yaml")
     all_nodes = atlas_graph.all_internal_nodes
atlas_init/tf_ext/tf_vars.py
@@ -2,16 +2,15 @@ from __future__ import annotations
 
 import logging
 from pathlib import Path
-from typing import ClassVar, NamedTuple
+from typing import ClassVar
 
-from ask_shell import new_task, run_and_wait
+from ask_shell import new_task
 from model_lib import IgnoreFalsy, dump
 from pydantic import Field, RootModel
 from zero_3rdparty.file_utils import ensure_parents_write_text
 from zero_3rdparty.str_utils import instance_repr
 
-from atlas_init.tf_ext.args import REPO_PATH_ARG, SKIP_EXAMPLES_DIRS_OPTION
-from atlas_init.tf_ext.constants import ATLAS_PROVIDER_NAME
+from atlas_init.tf_ext.args import REPO_PATH_ATLAS_ARG, SKIP_EXAMPLES_DIRS_OPTION
 from atlas_init.tf_ext.paths import (
     ResourceTypes,
     find_resource_types_with_usages,
@@ -19,21 +18,24 @@ from atlas_init.tf_ext.paths import (
     get_example_directories,
     is_variable_name_external,
 )
-from atlas_init.tf_ext.settings import TfDepSettings
+from atlas_init.tf_ext.provider_schema import parse_atlas_schema
+from atlas_init.tf_ext.settings import TfExtSettings
 
 logger = logging.getLogger(__name__)
 
 
 def tf_vars(
-    repo_path: Path = REPO_PATH_ARG,
+    repo_path: Path = REPO_PATH_ATLAS_ARG,
     skip_names: list[str] = SKIP_EXAMPLES_DIRS_OPTION,
 ):
-    settings = TfDepSettings.from_env()
+    settings = TfExtSettings.from_env()
     logger.info(f"Analyzing Terraform variables in repository: {repo_path}")
     example_dirs = get_example_directories(repo_path, skip_names)
     assert example_dirs, "No example directories found. Please check the repository path and skip names."
     with new_task("Parsing provider schema") as task:
-        resource_types, resource_types_deprecated = parse_schema_resource_types(example_dirs[0])
+        atlas_schema = parse_atlas_schema()
+        resource_types = atlas_schema.resource_types
+        resource_types_deprecated = atlas_schema.deprecated_resource_types
     ensure_parents_write_text(settings.schema_resource_types_path, dump(sorted(resource_types), format="yaml"))
     logger.info(f"Provider schema resource types written to {settings.schema_resource_types_path}")
     ensure_parents_write_text(
@@ -51,7 +53,7 @@ def tf_vars(
         logger.warning(f"Missing resource types in examples:\n{'\n'.join(sorted(missing_example_resource_types))}")
 
 
-def parse_provider_resurce_schema(schema: dict, provider_name: str) -> dict:
+def parse_provider_resource_schema(schema: dict, provider_name: str) -> dict:
     schemas = schema.get("provider_schemas", {})
     for provider_url, provider_schema in schemas.items():
         if provider_url.endswith(provider_name):
@@ -104,7 +106,7 @@ def vars_usage_dumping(variables: TfVarsUsage) -> str:
     return dump(vars_model, format="yaml")
 
 
-def update_resource_types(settings: TfDepSettings, example_dirs: list[Path], task: new_task) -> ResourceTypes:
+def update_resource_types(settings: TfExtSettings, example_dirs: list[Path], task: new_task) -> ResourceTypes:
     resource_types = ResourceTypes(root={})
     for example_dir in example_dirs:
         example_resources = find_resource_types_with_usages(example_dir)
@@ -130,7 +132,7 @@ def resource_types_dumping(resource_types: ResourceTypes, with_external: bool =
     return dump(dict(sorted(resource_types_model.items())), format="yaml")
 
 
-def update_variables(settings: TfDepSettings, example_dirs: list[Path], task: new_task):
+def update_variables(settings: TfExtSettings, example_dirs: list[Path], task: new_task):
     variables = parse_all_variables(example_dirs, task)
     logger.info(f"Found {len(variables.root)} variables in the examples.")
     vars_yaml = vars_usage_dumping(variables)
@@ -144,23 +146,6 @@ def update_variables(settings: TfDepSettings, example_dirs: list[Path], task: ne
     logger.info(f"External variables usage written to {settings.vars_external_file_path}")
 
 
-class ResourceTypesSchema(NamedTuple):
-    resource_types: list[str]
-    deprecated_resource_types: list[str]
-
-
-def parse_schema_resource_types(example_dir: Path) -> ResourceTypesSchema:
-    schema_run = run_and_wait("terraform providers schema -json", cwd=example_dir, ansi_content=False)
-    parsed = schema_run.parse_output(dict, output_format="json")
-    resource_schema = parse_provider_resurce_schema(parsed, ATLAS_PROVIDER_NAME)
-
-    def is_deprecated(resource_details: dict) -> bool:
-        return resource_details["block"].get("deprecated", False)
-
-    deprecated_resource_types = [name for name, details in resource_schema.items() if is_deprecated(details)]
-    return ResourceTypesSchema(sorted(resource_schema.keys()), sorted(deprecated_resource_types))
-
-
 def parse_all_variables(examples_dirs: list[Path], task: new_task) -> TfVarsUsage:
     variables_usage = TfVarsUsage(root={})
     for example_dir in examples_dirs:
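Note: the removed `parse_schema_resource_types` shelled out to `terraform providers schema -json` and split the provider's resource types into active and deprecated sets; that logic now sits behind `parse_atlas_schema()`. A stand-alone approximation of the old approach using only the standard library (the helper name and default provider name are illustrative; it requires terraform and an initialized working directory):

```python
# Sketch only: list active and deprecated resource types for one provider from
# the JSON output of `terraform providers schema -json`.
import json
import subprocess
from pathlib import Path


def list_resource_types(example_dir: Path, provider_name: str = "mongodbatlas") -> tuple[list[str], list[str]]:
    raw = subprocess.run(
        ["terraform", "providers", "schema", "-json"],
        cwd=example_dir, check=True, capture_output=True, text=True,
    ).stdout
    schema = json.loads(raw)
    for provider_url, provider_schema in schema.get("provider_schemas", {}).items():
        if provider_url.endswith(provider_name):
            resources = provider_schema.get("resource_schemas", {})
            deprecated = [name for name, details in resources.items() if details["block"].get("deprecated", False)]
            return sorted(resources), sorted(deprecated)
    return [], []
```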
atlas_init/tf_ext/typer_app.py
@@ -1,11 +1,11 @@
 from ask_shell import configure_logging
 from typer import Typer
 
-from atlas_init.tf_ext import api_call
+from atlas_init.tf_ext import api_call, settings, tf_desc_gen, tf_mod_gen_provider
 
 
 def typer_main():
-    from atlas_init.tf_ext import tf_dep, tf_modules, tf_vars
+    from atlas_init.tf_ext import tf_dep, tf_mod_gen, tf_modules, tf_vars
 
     app = Typer(
         name="tf-ext",
@@ -14,8 +14,12 @@ def typer_main():
     app.command(name="dep-graph")(tf_dep.tf_dep_graph)
     app.command(name="vars")(tf_vars.tf_vars)
     app.command(name="modules")(tf_modules.tf_modules)
+    app.command(name="mod-gen")(tf_mod_gen.tf_mod_gen)
+    app.command(name="desc-gen")(tf_desc_gen.tf_desc_gen)
     app.command(name="api")(api_call.api)
     app.command(name="api-config")(api_call.api_config)
+    app.command(name="mod-gen-provider")(tf_mod_gen_provider.tf_mod_gen_provider_resource_modules)
+    app.command(name="check-env-vars")(settings.init_tf_ext_settings)
     configure_logging(app)
     app()
 
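Note: the new commands are attached to the existing `tf-ext` Typer app with the same `app.command(name=...)(func)` style already used for `dep-graph` and `vars`. A minimal runnable illustration of that registration pattern (the demo app and command bodies below are not from the package):

```python
# Sketch only: register plain functions on a Typer app under explicit command names.
import typer


def mod_gen(name: str = typer.Option("", "-n", "--name", help="Name of the module")):
    typer.echo(f"generating module {name or '<unnamed>'}")


def desc_gen():
    typer.echo("generating attribute descriptions")


app = typer.Typer(name="tf-ext-demo")
app.command(name="mod-gen")(mod_gen)
app.command(name="desc-gen")(desc_gen)

if __name__ == "__main__":
    app()  # e.g. `python demo.py mod-gen -n example_module`
```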
{atlas_init-0.7.0.dist-info → atlas_init-0.8.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: atlas-init
-Version: 0.7.0
+Version: 0.8.0
 Project-URL: Documentation, https://github.com/EspenAlbert/atlas-init#readme
 Project-URL: Issues, https://github.com/EspenAlbert/atlas-init/issues
 Project-URL: Source, https://github.com/EspenAlbert/atlas-init
@@ -12,10 +12,11 @@ Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.13
 Requires-Dist: appdirs==1.4.4
-Requires-Dist: ask-shell>=0.0.4
+Requires-Dist: ask-shell>=0.0.5
 Requires-Dist: boto3==1.35.92
 Requires-Dist: gitpython==3.1.42
 Requires-Dist: humanize==4.9.0
+Requires-Dist: inflection==0.5.1
 Requires-Dist: model-lib
 Requires-Dist: motor==3.7.1
 Requires-Dist: mypy-boto3-cloudformation==1.37.22
@@ -30,7 +31,7 @@ Requires-Dist: rich==14.0.0
 Requires-Dist: stringcase==1.2.0
 Requires-Dist: tenacity==9.0.0
 Requires-Dist: typer>=0.15.1
-Requires-Dist: zero-3rdparty
+Requires-Dist: zero-3rdparty==1.0.0b5
 Description-Content-Type: text/markdown
 
 # Atlas Init - A CLI for developing integrations with MongoDB Atlas