atlas-init 0.1.0-py3-none-any.whl → 0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. atlas_init/__init__.py +3 -3
  2. atlas_init/atlas_init.yaml +51 -34
  3. atlas_init/cli.py +76 -72
  4. atlas_init/cli_cfn/app.py +40 -117
  5. atlas_init/cli_cfn/{cfn.py → aws.py} +129 -14
  6. atlas_init/cli_cfn/cfn_parameter_finder.py +89 -6
  7. atlas_init/cli_cfn/example.py +203 -0
  8. atlas_init/cli_cfn/files.py +63 -0
  9. atlas_init/cli_helper/go.py +6 -3
  10. atlas_init/cli_helper/run.py +18 -2
  11. atlas_init/cli_helper/tf_runner.py +12 -21
  12. atlas_init/cli_root/__init__.py +0 -0
  13. atlas_init/cli_root/trigger.py +153 -0
  14. atlas_init/cli_tf/app.py +211 -4
  15. atlas_init/cli_tf/changelog.py +103 -0
  16. atlas_init/cli_tf/debug_logs.py +221 -0
  17. atlas_init/cli_tf/debug_logs_test_data.py +253 -0
  18. atlas_init/cli_tf/github_logs.py +229 -0
  19. atlas_init/cli_tf/go_test_run.py +194 -0
  20. atlas_init/cli_tf/go_test_run_format.py +31 -0
  21. atlas_init/cli_tf/go_test_summary.py +144 -0
  22. atlas_init/cli_tf/hcl/__init__.py +0 -0
  23. atlas_init/cli_tf/hcl/cli.py +161 -0
  24. atlas_init/cli_tf/hcl/cluster_mig.py +348 -0
  25. atlas_init/cli_tf/hcl/parser.py +140 -0
  26. atlas_init/cli_tf/schema.py +222 -18
  27. atlas_init/cli_tf/schema_go_parser.py +236 -0
  28. atlas_init/cli_tf/schema_table.py +150 -0
  29. atlas_init/cli_tf/schema_table_models.py +155 -0
  30. atlas_init/cli_tf/schema_v2.py +599 -0
  31. atlas_init/cli_tf/schema_v2_api_parsing.py +298 -0
  32. atlas_init/cli_tf/schema_v2_sdk.py +361 -0
  33. atlas_init/cli_tf/schema_v3.py +222 -0
  34. atlas_init/cli_tf/schema_v3_sdk.py +279 -0
  35. atlas_init/cli_tf/schema_v3_sdk_base.py +68 -0
  36. atlas_init/cli_tf/schema_v3_sdk_create.py +216 -0
  37. atlas_init/humps.py +253 -0
  38. atlas_init/repos/cfn.py +6 -1
  39. atlas_init/repos/path.py +3 -3
  40. atlas_init/settings/config.py +30 -11
  41. atlas_init/settings/env_vars.py +29 -3
  42. atlas_init/settings/path.py +12 -1
  43. atlas_init/settings/rich_utils.py +39 -2
  44. atlas_init/terraform.yaml +77 -1
  45. atlas_init/tf/.terraform.lock.hcl +125 -0
  46. atlas_init/tf/always.tf +11 -2
  47. atlas_init/tf/main.tf +3 -0
  48. atlas_init/tf/modules/aws_s3/provider.tf +1 -1
  49. atlas_init/tf/modules/aws_vars/aws_vars.tf +2 -0
  50. atlas_init/tf/modules/aws_vpc/provider.tf +4 -1
  51. atlas_init/tf/modules/cfn/cfn.tf +47 -33
  52. atlas_init/tf/modules/cfn/kms.tf +54 -0
  53. atlas_init/tf/modules/cfn/resource_actions.yaml +1 -0
  54. atlas_init/tf/modules/cfn/variables.tf +31 -0
  55. atlas_init/tf/modules/cloud_provider/cloud_provider.tf +1 -0
  56. atlas_init/tf/modules/cloud_provider/provider.tf +1 -1
  57. atlas_init/tf/modules/cluster/cluster.tf +34 -24
  58. atlas_init/tf/modules/cluster/provider.tf +1 -1
  59. atlas_init/tf/modules/federated_vars/federated_vars.tf +3 -0
  60. atlas_init/tf/modules/federated_vars/provider.tf +1 -1
  61. atlas_init/tf/modules/project_extra/project_extra.tf +15 -1
  62. atlas_init/tf/modules/stream_instance/stream_instance.tf +1 -1
  63. atlas_init/tf/modules/vpc_peering/vpc_peering.tf +1 -1
  64. atlas_init/tf/modules/vpc_privatelink/versions.tf +1 -1
  65. atlas_init/tf/outputs.tf +11 -3
  66. atlas_init/tf/providers.tf +2 -1
  67. atlas_init/tf/variables.tf +17 -0
  68. atlas_init/typer_app.py +76 -0
  69. {atlas_init-0.1.0.dist-info → atlas_init-0.1.4.dist-info}/METADATA +58 -21
  70. atlas_init-0.1.4.dist-info/RECORD +91 -0
  71. {atlas_init-0.1.0.dist-info → atlas_init-0.1.4.dist-info}/WHEEL +1 -1
  72. atlas_init-0.1.0.dist-info/RECORD +0 -61
  73. /atlas_init/tf/modules/aws_vpc/{aws-vpc.tf → aws_vpc.tf} +0 -0
  74. {atlas_init-0.1.0.dist-info → atlas_init-0.1.4.dist-info}/entry_points.txt +0 -0
atlas_init/cli_helper/go.py CHANGED
@@ -2,8 +2,6 @@ import logging
  import os
  from pathlib import Path

- import dotenv
-
  from atlas_init.cli_helper.run import run_command_is_ok
  from atlas_init.settings.config import TestSuite
  from atlas_init.settings.env_vars import AtlasInitSettings
@@ -18,7 +16,12 @@ def run_go_tests(
      settings: AtlasInitSettings,
      groups: list[TestSuite],
  ):
-     test_env = os.environ | dotenv.dotenv_values(settings.env_vars_vs_code)
+     extra_vars = settings.load_env_vars(settings.env_vars_vs_code)
+     logger.info(f"go test env-vars-extra: {sorted(extra_vars)}")
+     test_env = os.environ | extra_vars
+     ci_value = test_env.pop("CI", None)
+     if ci_value:
+         logger.warning(f"popped CI={ci_value}")
      for group in groups:
          packages = ",".join(f"{package_prefix}/{pkg}" for pkg in group.repo_go_packages.get(repo_alias, []))
          if not packages:
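For context on the new env handling (the variable values in this sketch are illustrative, not from the package): the dict union `os.environ | extra_vars` lets values loaded from the dotenv file override inherited shell variables, and `pop("CI", None)` strips `CI` so a local Go test run does not behave as if it were running in CI. A minimal sketch:

    import os

    extra_vars = {"MONGODB_ATLAS_ORG_ID": "org-from-dotenv"}  # pretend dotenv values
    test_env = os.environ | extra_vars  # plain dict; right-hand side wins on key collisions
    ci_value = test_env.pop("CI", None)  # remove CI if it was inherited from the shell
    assert "CI" not in test_env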
atlas_init/cli_helper/run.py CHANGED
@@ -13,7 +13,11 @@ StrT = TypeVar("StrT", bound=str)


  def run_command_is_ok(
-     cmd: list[StrT], env: dict | None, cwd: Path | str, logger: Logger, output: None | IO = None
+     cmd: list[StrT],
+     env: dict | None,
+     cwd: Path | str,
+     logger: Logger,
+     output: IO | None = None,
  ) -> bool:
      env = env or {**os.environ}
      command_str = " ".join(cmd)
@@ -73,13 +77,25 @@ def run_command_exit_on_failure(
          raise typer.Exit(1)


- def run_command_receive_result(command: str, cwd: Path, logger: Logger, env: dict | None = None) -> str:
+ def run_command_receive_result(
+     command: str, cwd: Path, logger: Logger, env: dict | None = None, *, can_fail: bool = False
+ ) -> str:
      with TemporaryDirectory() as temp_dir:
          result_file = Path(temp_dir) / "file"
          with open(result_file, "w") as file:
              is_ok = run_command_is_ok(command.split(), env=env, cwd=cwd, logger=logger, output=file)
          output_text = result_file.read_text().strip()
          if not is_ok:
+             if can_fail:
+                 logger.warning(f"command failed {command}, {output_text}")
+                 return f"FAIL: {output_text}"
              logger.critical(f"command failed {command}, {output_text}")
              raise typer.Exit(1)
          return output_text
+
+
+ def add_to_clipboard(clipboard_content: str, logger: Logger):
+     if pb_binary := find_binary_on_path("pbcopy", logger, allow_missing=True):
+         subprocess.run(pb_binary, text=True, input=clipboard_content, check=True)
+     else:
+         logger.warning("pbcopy not found on $PATH")
atlas_init/cli_helper/tf_runner.py CHANGED
@@ -1,6 +1,5 @@
  import logging
  import os
- import subprocess
  from dataclasses import dataclass
  from pathlib import Path
  from typing import Any
@@ -9,7 +8,7 @@ from model_lib import dump
  from zero_3rdparty.file_utils import copy, iter_paths_and_relative

  from atlas_init.cli_helper.run import (
-     find_binary_on_path,
+     add_to_clipboard,
      run_command_is_ok,
      run_command_receive_result,
  )
@@ -19,12 +18,14 @@ from atlas_init.settings.env_vars import AtlasInitSettings
  logger = logging.getLogger(__name__)


- def get_tf_vars(settings: AtlasInitSettings, active_groups: list[TestSuite]) -> dict[str, Any]:
-     tf_vars = TerraformVars()
+ def get_tf_vars(settings: AtlasInitSettings, active_groups: list[TestSuite]) -> dict[str, Any]: # type: ignore
+     tf_vars = TerraformVars() # type: ignore
      tf_vars = sum((group.vars for group in active_groups), start=tf_vars)
      return {
          "atlas_public_key": settings.MONGODB_ATLAS_PUBLIC_KEY,
          "atlas_private_key": settings.MONGODB_ATLAS_PRIVATE_KEY,
+         "atlas_base_url": settings.MONGODB_ATLAS_BASE_URL,
+         "is_mongodbgov_cloud": settings.is_mongodbgov_cloud,
          "org_id": settings.MONGODB_ATLAS_ORG_ID,
          "aws_region": settings.AWS_REGION,
          "project_name": settings.project_name,
@@ -41,28 +42,22 @@ class TerraformRunError(Exception):

  @dataclass
  class state_copier: # noqa: N801
-     profile_path: Path
+     state_path: Path
      tf_path: Path

-     @property
-     def state_path(self) -> Path:
-         return self.profile_path / "tf_state"
-
      def __enter__(self):
-         if not self.state_path.exists():
-             return
+         self.state_path.mkdir(exist_ok=True, parents=True)
          for state_path, rel_path in iter_paths_and_relative(self.state_path, "terraform.tfstate*", rglob=False):
              copy(state_path, self.tf_path / rel_path)

      def __exit__(self, *_):
-         if not self.tf_path.exists():
-             return
+         self.tf_path.mkdir(exist_ok=True, parents=True)
          for state_path, rel_path in iter_paths_and_relative(self.tf_path, "terraform.tfstate*", rglob=False):
              state_path.rename(self.state_path / rel_path)


  def run_terraform(settings: AtlasInitSettings, command: str, extra_args: list[str]):
-     with state_copier(settings.profile_dir, settings.tf_path):
+     with state_copier(settings.tf_state_path, settings.tf_path):
          _run_terraform(settings, command, extra_args)


@@ -87,10 +82,7 @@ def _run_terraform(settings: AtlasInitSettings, command: str, extra_args: list[s
      env_generated = settings.env_vars_generated
      if env_generated.exists():
          clipboard_content = "\n".join(f"export {line}" for line in env_generated.read_text().splitlines())
-         pb_binary = find_binary_on_path("pbcopy", logger, allow_missing=True)
-         if not pb_binary:
-             return
-         subprocess.run(pb_binary, text=True, input=clipboard_content, check=True)
+         add_to_clipboard(clipboard_content, logger)
          logger.info("loaded env-vars to clipboard ✅")


@@ -103,12 +95,11 @@ def dump_tf_vars(settings: AtlasInitSettings, tf_vars: dict[str, Any]):


  def export_outputs(settings: AtlasInitSettings) -> None:
-     output_path = settings.profile_dir / "tf_outputs.json"
-     with state_copier(settings.profile_dir, settings.tf_path):
+     with state_copier(settings.tf_state_path, settings.tf_path):
          result = run_command_receive_result(
              "terraform output -json",
              settings.tf_path,
              logger,
              env=os.environ | {"TF_DATA_DIR": settings.tf_data_dir},
          )
-     output_path.write_text(result)
+     settings.tf_outputs_path.write_text(result)
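The reworked `state_copier` stages `terraform.tfstate*` files into the Terraform working directory on entry and moves them back on exit, so each profile keeps its own state while the shared `tf/` directory stays clean between runs. A minimal usage sketch (the paths are hypothetical):

    from pathlib import Path

    from atlas_init.cli_helper.tf_runner import state_copier

    with state_copier(Path("profiles/default/tf_state"), Path("tf")):
        # run terraform here; terraform.tfstate lives in tf/ only for the duration
        ...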
atlas_init/cli_root/__init__.py ADDED (empty file)
atlas_init/cli_root/trigger.py ADDED
@@ -0,0 +1,153 @@
+ import logging
+
+ import requests
+ from zero_3rdparty.id_creator import simple_id
+
+ from atlas_init.settings.env_vars import init_settings
+ from atlas_init.settings.path import dump_dotenv, dump_vscode_dotenv
+ from atlas_init.typer_app import app_command
+
+ logger = logging.getLogger(__name__)
+
+
+ @app_command()
+ def trigger_app():
+     settings = init_settings()
+     login_req = {
+         "username": settings.MONGODB_ATLAS_PUBLIC_KEY,
+         "apiKey": settings.MONGODB_ATLAS_PRIVATE_KEY,
+     }
+     response = requests.post(
+         "https://realm-dev.mongodb.com/api/admin/v3.0/auth/providers/mongodb-cloud/login",
+         json=login_req,
+         headers={"Accept": "application/json", "Content-Type": "application/json"},
+         timeout=10,
+     )
+     response.raise_for_status()
+     token_response = response.json()
+     access_token = token_response["access_token"]
+     logger.info(f"token: {access_token}")
+     auth_headers = {"Authorization": f"Bearer {access_token}"}
+     env_vars = settings.load_env_vars_generated()
+     project_id = env_vars["MONGODB_ATLAS_PROJECT_ID"]
+     cluster_name = env_vars["MONGODB_ATLAS_CLUSTER_NAME"]
+     apps = list_apps(auth_headers, project_id)
+     if apps:
+         logger.info(f"got apps: {apps}")
+         app_id = apps[0]["_id"]
+     else:
+         logger.info("no apps found, creating one")
+         app = create_app(auth_headers, project_id, cluster_name)
+         logger.info(f"created app: {app}")
+         app_id = app["_id"]
+     logger.info(f"using app_id: {app_id}")
+     suffix = simple_id(length=5)
+     service = create_service(auth_headers, project_id, cluster_name, app_id, suffix)
+     logger.info(f"new service: {service}")
+     service_id = service["_id"]
+     logger.info(f"using service_id: {service_id}")
+     func_response = create_function(auth_headers, project_id, app_id, suffix)
+     logger.info(f"new function: {func_response}")
+     func_id = func_response["_id"]
+     func_name = func_response["name"]
+     logger.info(f"using func_id: {func_id}")
+     extra_env_vars = {
+         "MONGODB_REALM_APP_ID": app_id,
+         "MONGODB_REALM_SERVICE_ID": service_id,
+         "MONGODB_REALM_FUNCTION_ID": func_id,
+         "MONGODB_REALM_FUNCTION_NAME": func_name,
+         "MONGODB_REALM_BASE_URL": "https://realm-dev.mongodb.com/",
+     }
+     dump_dotenv(settings.env_vars_trigger, extra_env_vars)
+     logger.info(f"done {settings.env_vars_trigger} created with trigger env-vars ✅")
+
+     generated_env_vars = settings.load_env_vars_generated()
+     generated_env_vars.update(extra_env_vars)
+     dump_dotenv(settings.env_vars_generated, generated_env_vars)
+     logger.info(f"done {settings.env_vars_generated} updated with trigger env-vars ✅")
+
+     dump_vscode_dotenv(settings.env_vars_generated, settings.env_vars_vs_code, **extra_env_vars)
+     logger.info(f"done {settings.env_vars_vs_code} updated with trigger env-vars ✅")
+
+
+ def list_apps(auth_headers: dict[str, str], project_id: str) -> list[dict]:
+     existing_apps_response = requests.get(
+         f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps",
+         headers=auth_headers,
+         timeout=10,
+     )
+     existing_apps_response.raise_for_status()
+     apps = existing_apps_response.json()
+     assert isinstance(apps, list), f"expected list, got: {apps!r}"
+     return apps
+
+
+ def create_app(auth_headers: dict[str, str], project_id: str, cluster_name: str) -> dict:
+     create_app_req = {
+         "name": "atlas-init-app",
+         "location": "US-VA",
+         "deployment_model": "GLOBAL",
+         "environment": "production",
+         "provider_region": "aws-us-east-1",
+         "data_source": {
+             "name": "mongodb-atlas",
+             "type": "mongodb-atlas",
+             "config": {
+                 "clusterName": cluster_name,
+                 "readPreference": "primary",
+                 "wireProtocolEnabled": True,
+             },
+         },
+     }
+     create_app_response = requests.post(
+         f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps",
+         json=create_app_req,
+         headers=auth_headers,
+         timeout=10,
+     )
+     create_app_response.raise_for_status()
+     app = create_app_response.json()
+     assert isinstance(app, dict), f"expected dict, got: {app!r}"
+     return app
+
+
+ def create_service(auth_headers: dict[str, str], project_id: str, cluster_name: str, app_id: str, suffix: str) -> dict:
+     create_service_req = {
+         "name": f"atlas-init-{suffix}",
+         "type": "mongodb-atlas",
+         "config": {
+             "clusterName": cluster_name,
+             "readPreference": "primary",
+             "wireProtocolEnabled": True,
+         },
+     }
+     create_service_response = requests.post(
+         f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps/{app_id}/services",
+         json=create_service_req,
+         headers=auth_headers,
+         timeout=10,
+     )
+     create_service_response.raise_for_status()
+     service = create_service_response.json()
+     assert isinstance(service, dict), f"expected dict, got: {service}"
+     return service
+
+
+ def create_function(auth_headers: dict[str, str], project_id: str, app_id: str, suffix: str) -> dict:
+     create_func_req = {
+         "can_evaluate": {},
+         "name": f"testfunc-{suffix}",
+         "private": True,
+         "source": 'exports = function(changeEvent) {console.log("New Document Inserted")};',
+         "run_as_system": True,
+     }
+     create_func_response = requests.post(
+         f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps/{app_id}/functions",
+         json=create_func_req,
+         headers=auth_headers,
+         timeout=10,
+     )
+     create_func_response.raise_for_status()
+     func = create_func_response.json()
+     assert isinstance(func, dict), f"expected dict, got: {func}"
+     return func
atlas_init/cli_tf/app.py CHANGED
@@ -1,10 +1,32 @@
  import logging
+ import os
  import sys
+ from collections import defaultdict
+ from datetime import timedelta
+ from pathlib import Path

  import typer
+ from zero_3rdparty.datetime_utils import utc_now
  from zero_3rdparty.file_utils import clean_dir

- from atlas_init.cli_helper.run import run_binary_command_is_ok
+ from atlas_init.cli_helper.run import (
+     add_to_clipboard,
+     run_binary_command_is_ok,
+     run_command_exit_on_failure,
+     run_command_receive_result,
+ )
+ from atlas_init.cli_tf.changelog import convert_to_changelog
+ from atlas_init.cli_tf.github_logs import (
+     GH_TOKEN_ENV_NAME,
+     find_test_runs,
+     include_filestems,
+     include_test_jobs,
+ )
+ from atlas_init.cli_tf.go_test_run import GoTestRun
+ from atlas_init.cli_tf.go_test_summary import (
+     create_detailed_summary,
+     create_short_summary,
+ )
  from atlas_init.cli_tf.schema import (
      download_admin_api,
      dump_generator_config,
@@ -12,15 +34,24 @@ from atlas_init.cli_tf.schema import (
      update_provider_code_spec,
  )
  from atlas_init.cli_tf.schema_inspection import log_optional_only
+ from atlas_init.cli_tf.schema_v2 import (
+     generate_resource_go_resource_schema,
+     parse_schema,
+ )
+ from atlas_init.cli_tf.schema_v2_api_parsing import add_api_spec_info
+ from atlas_init.cli_tf.schema_v2_sdk import generate_model_go, parse_sdk_model
  from atlas_init.repos.path import Repo, current_repo_path
  from atlas_init.settings.env_vars import init_settings
+ from atlas_init.settings.interactive import confirm

  app = typer.Typer(no_args_is_help=True)
  logger = logging.getLogger(__name__)


  @app.command()
- def schema():
+ def schema(
+     branch: str = typer.Option("main", "-b", "--branch"),
+ ):
      settings = init_settings()
      schema_out_path = settings.schema_out_path_computed
      schema_out_path.mkdir(exist_ok=True)
@@ -31,10 +62,11 @@ def schema():
      generator_config_path.write_text(generator_config)
      provider_code_spec_path = schema_out_path / "provider-code-spec.json"
      admin_api_path = schema_out_path / "admin_api.yaml"
+
      if admin_api_path.exists():
          logger.warning(f"using existing admin api @ {admin_api_path}")
      else:
-         download_admin_api(admin_api_path)
+         download_admin_api(admin_api_path, branch=branch)

      if not run_binary_command_is_ok(
          cwd=schema_out_path,
@@ -56,7 +88,7 @@ def schema():
      if not run_binary_command_is_ok(
          cwd=schema_out_path,
          binary_name="tfplugingen-framework",
-         command=f"generate resources --input ./{provider_code_spec_path.name} --output {go_code_output.name}",
+         command=f"generate all --input ./{provider_code_spec_path.name} --output {go_code_output.name}",
          logger=logger,
      ):
          logger.critical("failed to generate plugin schema")
@@ -71,3 +103,178 @@ def schema():
  def schema_optional_only():
      repo_path = current_repo_path(Repo.TF)
      log_optional_only(repo_path)
+
+
+ @app.command()
+ def changelog(
+     pr: str = typer.Argument("", help="the PR number, will read the file in .changelog/$pr_input.txt"),
+     delete_input: bool = typer.Option(False, "-d", "--delete-input"),
+ ):
+     repo_path = current_repo_path(Repo.TF)
+     changelog_input_path = repo_path / f".changelog/{pr}_input.txt"
+     if not changelog_input_path.exists():
+         logger.critical(f"no file @ {changelog_input_path}")
+         raise typer.Abort
+     changes_in = changelog_input_path.read_text()
+     logger.info(f"will generate changelog to {changelog_input_path} based on changes:\n{changes_in}")
+     changes_out = convert_to_changelog(changes_in)
+     changelog_path = repo_path / f".changelog/{pr}.txt"
+     changelog_path.write_text(changes_out)
+     logger.info(f"updated file ✅ \n{changes_in}\n--> TO:\n{changes_out} ")
+     if delete_input:
+         logger.warning(f"deleting input file {changelog_input_path}")
+         changelog_input_path.unlink()
+
+
+ @app.command()
+ def example_gen(
+     in_path: Path = typer.Argument(..., help="Path to the latest code"),
+     out_path: Path = typer.Argument("", help="Output path (empty will use input path)"),
+ ):
+     out_path = out_path or in_path # type: ignore
+     assert in_path.is_dir(), f"path not found: {in_path}"
+     assert out_path.is_dir(), f"path not found: {out_path}"
+     run_command_exit_on_failure("terraform fmt", cwd=in_path, logger=logger)
+     if in_path == out_path:
+         logger.warning(f"will overwrite/change files in {out_path}")
+     else:
+         logger.info(f"will use from {in_path} -> {out_path}")
+     from zero_3rdparty import file_utils
+
+     for path, rel_path in file_utils.iter_paths_and_relative(in_path, "*.tf", "*.sh", "*.py", "*.md", rglob=False):
+         dest_path = out_path / rel_path
+         file_utils.copy(path, dest_path, clean_dest=False)
+
+
+ @app.command()
+ def ci_tests(
+     test_group_name: str = typer.Option("", "-g"),
+     max_days_ago: int = typer.Option(1, "-d", "--days"),
+     branch: str = typer.Option("master", "-b", "--branch"),
+     workflow_file_stems: str = typer.Option("test-suite,terraform-compatibility-matrix", "-w", "--workflow"),
+     only_last_workflow: bool = typer.Option(False, "-l", "--last"),
+     names: str = typer.Option(
+         "",
+         "-n",
+         "--test-names",
+         help="comma separated list of test names to filter, e.g., TestAccCloudProviderAccessAuthorizationAzure_basic,TestAccBackupSnapshotExportBucket_basicAzure",
+     ),
+     summary_name: str = typer.Option(
+         "",
+         "-s",
+         "--summary",
+         help="the name of the summary directory to store detailed test results",
+     ),
+ ): # sourcery skip: use-named-expression
+     names_set: set[str] = set()
+     if names:
+         names_set.update(names.split(","))
+         logger.info(f"filtering tests by names: {names_set}")
+     repo_path = current_repo_path(Repo.TF)
+     token = run_command_receive_result("gh auth token", cwd=repo_path, logger=logger)
+     os.environ[GH_TOKEN_ENV_NAME] = token
+     end_test_date = utc_now()
+     start_test_date = end_test_date - timedelta(days=max_days_ago)
+     job_runs = find_test_runs(
+         start_test_date,
+         include_job=include_test_jobs(test_group_name),
+         branch=branch,
+         include_workflow=include_filestems(set(workflow_file_stems.split(","))),
+     )
+     test_results: dict[str, list[GoTestRun]] = defaultdict(list)
+     workflow_ids = set()
+     for key in sorted(job_runs.keys(), reverse=True):
+         workflow_id, job_id = key
+         workflow_ids.add(workflow_id)
+         if only_last_workflow and len(workflow_ids) > 1:
+             logger.info("only showing last workflow")
+             break
+         runs = job_runs[key]
+         if not runs:
+             logger.warning(f"no go tests for job_id={job_id}")
+             continue
+         for run in runs:
+             test_name = run.name
+             if names_set and test_name not in names_set:
+                 continue
+             test_results[test_name].append(run)
+
+     if summary_name:
+         summary = create_detailed_summary(summary_name, end_test_date, start_test_date, test_results, names_set)
+     else:
+         failing_names = [name for name, name_runs in test_results.items() if all(run.is_failure for run in name_runs)]
+         if not failing_names:
+             logger.info("ALL TESTS PASSED! ✅")
+             return
+         summary = create_short_summary(test_results, failing_names)
+     summary_str = "\n".join(summary)
+     add_to_clipboard(summary_str, logger)
+     logger.info(summary_str)
+
+
+ @app.command()
+ def schema2(
+     resource: str = typer.Argument(
+         "",
+         help="the resource name to generate the schema for. Must exist in the schema. E.g., 'stream_processor'",
+     ),
+     branch: str = typer.Option("main", "-b", "--branch", help="the branch for downloading openapi spec"),
+     admin_api_path: Path = typer.Option(
+         "", "-a", "--admin-api-path", help="the path to store/download the openapi spec"
+     ),
+     config_path: Path = typer.Option("", "-c", "--config", help="the path to the SchemaV2 config"),
+     replace: bool = typer.Option(False, "-r", "--replace", help="replace the existing schema file"),
+     sdk_repo_path_str: str = typer.Option("", "-sdk", "--sdk-repo-path", help="the path to the sdk repo"),
+ ):
+     repo_path = current_repo_path(Repo.TF)
+     config_path = config_path or repo_path / "schema_v2.yaml"
+     admin_api_path = admin_api_path or repo_path / "admin_api.yaml"
+     if admin_api_path.exists():
+         logger.info(f"using existing admin api @ {admin_api_path}")
+     else:
+         download_admin_api(admin_api_path, branch=branch)
+     schema = parse_schema(config_path)
+     logger.info("adding api spec info to schema")
+     add_api_spec_info(schema, admin_api_path, minimal_refs=True)
+     go_old = repo_path / f"internal/service/{resource.replace('_', '')}/resource_schema.go"
+     if not go_old.exists():
+         if confirm(
+             f"no file found @ {go_old}, ok to create it?",
+             is_interactive=True,
+             default=True,
+         ):
+             go_old.parent.mkdir(exist_ok=True, parents=True)
+         else:
+             logger.critical(f"no file found @ {go_old}")
+             raise typer.Abort
+     if replace:
+         logger.warning(f"replacing existing schema @ {go_old}")
+         go_new = go_old
+     else:
+         go_new = go_old.with_name("resource_schema_gen.go")
+     gen_src = generate_resource_go_resource_schema(schema, resource)
+     go_new.write_text(gen_src)
+     logger.info(f"generated new schema @ {go_new} ✅")
+
+     resource_schema = schema.resources[resource]
+     if conversion_config := resource_schema.conversion:
+         if not confirm(
+             f"resource {resource} has conversion, ok to generate conversion functions?",
+             is_interactive=True,
+             default=True,
+         ):
+             logger.info("skipping conversion functions")
+             return
+         logger.info("generating conversion functions")
+         if not sdk_repo_path_str:
+             logger.critical("must provide sdk repo path when generating conversion functions")
+             raise typer.Abort
+         sdk_repo_path = Path(sdk_repo_path_str)
+         if not sdk_repo_path.exists():
+             logger.critical(f"no sdk repo found @ {sdk_repo_path}")
+             raise typer.Abort
+         for sdk_start_ref in conversion_config.sdk_start_refs:
+             sdk_model = parse_sdk_model(sdk_repo_path, sdk_start_ref.name)
+             go_conversion_src = generate_model_go(schema, resource_schema, sdk_model)
+             go_conversion_path = go_old.with_name("model.go")
+             go_conversion_path.write_text(go_conversion_src)
atlas_init/cli_tf/changelog.py ADDED
@@ -0,0 +1,103 @@
+ import logging
+ import re
+ from typing import Annotated, Literal
+
+ import typer
+ from model_lib import Entity
+ from pydantic import BeforeValidator, model_validator
+
+ logger = logging.getLogger(__name__)
+
+
+ type ChangelogGroup = Literal["rs", "ds", "dsp"]
+ CHANGELOG_GROUPS = {"rs", "ds", "dsp"}
+ _group_prefixes = {
+     "rs": "resource/",
+     "ds": "data-source/",
+     "dsp": "data-source/",
+ }
+ _group_suffixes = {
+     "rs": "",
+     "ds": "",
+     "dsp": "s",
+ }
+ type ChangelogType = Literal["feat", "fix"]
+ CHANGELOG_SEPARATOR = "\n\n"
+ _types_headers = {"feat": "enhancement", "fix": "bug"}
+ _line_regex = re.compile(r"(?P<type>feat|fix)\((?P<groups>[^\]]+)\)\:\s(?P<text>.*)")
+
+
+ def as_group_name(resource_name: str, group: ChangelogGroup):
+     prefix = _group_prefixes[group]
+     suffix = _group_suffixes[group]
+     return f"{prefix}{resource_name}{suffix}"
+
+
+ def changelog_header(group: ChangelogType) -> str:
+     return f"release-note:{_types_headers[group]}"
+
+
+ def split_on_plus(groups: str | list[str]) -> list[str]:
+     return groups.split("+") if isinstance(groups, str) else groups
+
+
+ type GroupsValidation = Annotated[str, BeforeValidator(split_on_plus)]
+
+
+ class ChangelogPart(Entity):
+     type: ChangelogType
+     groups: str
+     text: str
+     resource_name: str
+
+     @property
+     def parsed_groups(self) -> list[ChangelogGroup]:
+         groups = set(self.groups.split("+"))
+         if invalid_groups := groups - CHANGELOG_GROUPS:
+             raise ValueError(f"found invalid groups {invalid_groups}, only {CHANGELOG_GROUPS} are valid")
+         return sorted(groups) # type: ignore
+
+     @model_validator(mode="after")
+     def ensure_parsed_groups(self):
+         assert self.parsed_groups
+         return self
+
+     def as_changelog(self) -> str:
+         parts = []
+         for group in self.parsed_groups:
+             header = changelog_header(self.type)
+             name = as_group_name(self.resource_name, group)
+             parts.append(f"```{header}\n{name}: {self.text}\n```")
+         return CHANGELOG_SEPARATOR.join(parts)
+
+     def __str__(self) -> str:
+         return self.as_changelog()
+
+
+ def _convert_to_changelog(changes: str) -> str:
+     if not changes.startswith("rs="):
+         err = "missing rs=YOUR_RESOURCE_NAME e.g., mongodbatlas_federated_settings_org_config"
+         raise ValueError(err)
+     header, *change_lines = changes.splitlines()
+     rs = header.removeprefix("rs=").strip()
+     changelog = []
+     for change in change_lines:
+         if changelog_part := as_changelog_parts(rs, change):
+             changelog.append(changelog_part)
+     logger.info(f"found a total of {len(changelog)} changes")
+     return CHANGELOG_SEPARATOR.join(str(part) for part in changelog)
+
+
+ def as_changelog_parts(resource_name: str, line_raw: str) -> ChangelogPart | None:
+     if match := _line_regex.search(line_raw):
+         return ChangelogPart(**match.groupdict(), resource_name=resource_name) # type: ignore
+     logger.warning(f"unable to parse line: {line_raw}")
+     return None
+
+
+ def convert_to_changelog(changes: str) -> str:
+     try:
+         return _convert_to_changelog(changes)
+     except ValueError as e:
+         logger.critical(str(e))
+         raise typer.Abort from e
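To illustrate the input format `convert_to_changelog` expects (the resource name and change texts below are made up): the first line must be `rs=<resource_name>`, and each following line is `feat(...)` or `fix(...)` with `+`-separated groups (`rs` resource, `ds` singular data source, `dsp` plural data source). A sketch:

    from atlas_init.cli_tf.changelog import convert_to_changelog

    changes = "\n".join([
        "rs=mongodbatlas_example",
        "feat(rs+ds): Adds new `example` attribute",
        "fix(dsp): Fixes pagination",
    ])
    print(convert_to_changelog(changes))
    # Emits one release-note block per (type, group) pair, e.g.
    #   ```release-note:enhancement
    #   data-source/mongodbatlas_example: Adds new `example` attribute
    #   ```
    # with "dsp" mapping to the plural form data-source/mongodbatlas_examples under release-note:bug.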