atlas-init 0.4.1__py3-none-any.whl → 0.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/atlas_init.yaml +3 -0
  3. atlas_init/cli_helper/go.py +103 -57
  4. atlas_init/cli_root/go_test.py +13 -10
  5. atlas_init/cli_tf/app.py +4 -0
  6. atlas_init/cli_tf/example_update.py +142 -0
  7. atlas_init/cli_tf/example_update_test/test_update_example.tf +23 -0
  8. atlas_init/cli_tf/example_update_test.py +96 -0
  9. atlas_init/cli_tf/github_logs.py +4 -1
  10. atlas_init/cli_tf/go_test_run.py +23 -0
  11. atlas_init/cli_tf/go_test_summary.py +7 -1
  12. atlas_init/cli_tf/hcl/modifier.py +144 -0
  13. atlas_init/cli_tf/hcl/modifier_test/test_process_variables_output_.tf +25 -0
  14. atlas_init/cli_tf/hcl/modifier_test/test_process_variables_variable_.tf +24 -0
  15. atlas_init/cli_tf/hcl/modifier_test.py +95 -0
  16. atlas_init/cli_tf/log_clean.py +29 -0
  17. atlas_init/repos/path.py +14 -0
  18. atlas_init/settings/config.py +24 -13
  19. atlas_init/settings/rich_utils.py +1 -1
  20. atlas_init/tf/.terraform.lock.hcl +16 -16
  21. atlas_init/tf/main.tf +25 -1
  22. atlas_init/tf/modules/aws_kms/aws_kms.tf +100 -0
  23. atlas_init/tf/modules/aws_kms/provider.tf +7 -0
  24. atlas_init/tf/modules/cloud_provider/cloud_provider.tf +8 -1
  25. atlas_init/tf/modules/encryption_at_rest/main.tf +29 -0
  26. atlas_init/tf/modules/encryption_at_rest/provider.tf +9 -0
  27. atlas_init/tf/variables.tf +5 -0
  28. {atlas_init-0.4.1.dist-info → atlas_init-0.4.2.dist-info}/METADATA +8 -6
  29. {atlas_init-0.4.1.dist-info → atlas_init-0.4.2.dist-info}/RECORD +31 -19
  30. {atlas_init-0.4.1.dist-info → atlas_init-0.4.2.dist-info}/WHEEL +0 -0
  31. {atlas_init-0.4.1.dist-info → atlas_init-0.4.2.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,144 @@
1
+ import logging
2
+ from collections import defaultdict
3
+ from copy import deepcopy
4
+ from pathlib import Path
5
+
6
+ import hcl2
7
+ from lark import Token, Tree, UnexpectedToken
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+ BLOCK_TYPE_VARIABLE = "variable"
12
+ BLOCK_TYPE_OUTPUT = "output"
13
+
14
+
15
def process_token(node: Token, indent=0):
    """Log a leaf token at DEBUG level and return an independent deep copy of it.

    Copying keeps the rebuilt tree fully detached from the parsed input tree.
    """
    # Use lazy %-style logging args. The original passed extra positional args
    # together with an f-string message; since the message contained no %
    # placeholders, logging's `msg % args` step failed ("not all arguments
    # converted") whenever DEBUG was enabled, and the values never appeared.
    logger.debug("[%d] (token)\t|%s %s %s", indent, " " * indent, node.type, node.value)
    return deepcopy(node)
18
+
19
+
20
def is_identifier_block_type(tree: Tree | Token, block_type: str) -> bool:
    """Return True when *tree* is a Tree whose first child's value equals *block_type*."""
    if not isinstance(tree, Tree):
        return False
    children = tree.children
    if not children:
        return False
    # Tokens carry `.value`; anything without one can't match.
    return getattr(children[0], "value", None) == block_type
27
+
28
+
29
def is_block_type(tree: Tree, block_type: str) -> bool:
    """True when *tree* is an HCL `block` node whose identifier matches *block_type*."""
    try:
        if tree.data != "block":
            return False
        return is_identifier_block_type(tree.children[0], block_type)
    except (IndexError, AttributeError):
        # Malformed / unexpected node shapes simply don't match.
        return False
34
+
35
+
36
def update_description(tree: Tree, new_descriptions: dict[str, str], existing_names: dict[str, list[str]]) -> Tree:
    """Replace (or add) the `description` attribute of a single HCL block.

    Records the block's old description in *existing_names* (name -> list of
    old descriptions, "" when absent), then rewrites the block body with the
    description from *new_descriptions* when one is provided.  Returns the
    original tree untouched when no new description exists for this name.
    """
    new_children = tree.children.copy()
    # Block layout (by position): children[0] = block-type identifier,
    # children[1] = name token, children[2] = body subtree.
    variable_body = new_children[2]
    assert variable_body.data == "body"
    name = token_name(new_children[1])
    old_description = read_description_attribute(variable_body)
    existing_names[name].append(old_description)
    new_description = new_descriptions.get(name, "")
    if not new_description:
        logger.debug(f"no description found for variable {name}")
        return tree
    new_children[2] = update_body_with_description(variable_body, new_description)
    # Return a fresh Tree; the input tree is never mutated in place.
    return Tree(tree.data, new_children)
49
+
50
+
51
def token_name(token: Token | Tree) -> str:
    """Return a Token's value stripped of surrounding double quotes.

    Raises ValueError for anything that is not a lark Token.
    """
    if not isinstance(token, Token):
        raise ValueError(f"unexpected token type {type(token)} for token name")
    return token.value.strip('"')
56
+
57
+
58
def has_attribute_description(maybe_attribute: Token | Tree) -> bool:
    """True when the node is an `attribute` tree whose identifier is `description`."""
    if not isinstance(maybe_attribute, Tree):
        return False
    if maybe_attribute.data != "attribute":
        return False
    identifier = maybe_attribute.children[0]
    return identifier.children[0].value == "description"  # type: ignore
62
+
63
+
64
def update_body_with_description(tree: Tree, new_description: str) -> Tree:
    """Return a copy of a block body with its description set to *new_description*.

    Every existing `description` attribute is replaced; when none exists, a
    newline plus a fresh attribute is prepended to the body.
    """
    escaped = new_description.replace('"', '\\"')
    children = tree.children.copy()
    replaced_any = False
    for idx, child in enumerate(children):
        if has_attribute_description(child):
            replaced_any = True
            children[idx] = create_description_attribute(escaped)
    if not replaced_any:
        # Prepend "\n  description = ..." so the attribute lands on its own line.
        children[0:0] = [new_line(), create_description_attribute(escaped)]
    return Tree(tree.data, children)
76
+
77
+
78
def new_line() -> Tree:
    """Build a `new_line_or_comment` node used when inserting a description attribute."""
    return Tree(
        Token("RULE", "new_line_or_comment"),
        [Token("NL_OR_COMMENT", "\n ")],
    )
83
+
84
+
85
def read_description_attribute(tree: Tree) -> str:
    """Return the first `description` value found in a block body, or ""."""
    for child in tree.children:
        if has_attribute_description(child):
            # attribute children: [identifier, EQ, expr_term]; the value token
            # is the first child of the final expr_term.
            return token_name(child.children[-1].children[0])
    return ""
94
+
95
+
96
def create_description_attribute(description_value: str) -> Tree:
    """Build an HCL `description = "<value>"` attribute subtree."""
    identifier = Tree(Token("RULE", "identifier"), [Token("NAME", "description")])
    equals = Token("EQ", " =")
    value = Tree(Token("RULE", "expr_term"), [Token("STRING_LIT", f'"{description_value}"')])
    return Tree(Token("RULE", "attribute"), [identifier, equals, value])
103
+
104
+
105
def process_descriptions(
    node: Tree,
    name_updates: dict[str, str],
    existing_names: dict[str, list[str]],
    depth=0,
    *,
    block_type: str,
) -> Tree:
    """Recursively rebuild *node*, updating descriptions of matching blocks.

    Blocks of *block_type* (e.g. "variable" / "output") are passed through
    update_description; all other trees are recursed into and tokens are
    deep-copied, so the returned tree shares nothing with the input.
    """
    new_children = []
    # Lazy %-style logging args. The original mixed an f-string message with
    # extra positional args; with no % placeholders in the message, logging's
    # formatting step failed and the values were never rendered.
    logger.debug("[%d] (tree)\t|%s %s", depth, " " * depth, node.data)
    for child in node.children:
        if isinstance(child, Tree):
            if is_block_type(child, block_type):
                child = update_description(  # noqa: PLW2901
                    child, name_updates, existing_names
                )
            new_children.append(
                process_descriptions(child, name_updates, existing_names, depth + 1, block_type=block_type)
            )
        else:
            new_children.append(process_token(child, depth + 1))

    return Tree(node.data, new_children)
128
+
129
+
130
def update_descriptions(tf_path: Path, new_names: dict[str, str], block_type: str) -> tuple[str, dict[str, list[str]]]:
    """Rewrite descriptions of *block_type* blocks in a .tf file.

    Returns (new_file_text, old_descriptions) where old_descriptions maps each
    block name to the list of descriptions it had before ("" when absent).
    On a parse failure the file is left alone and ("", {}) is returned.
    """
    try:
        # hcl2.parses / hcl2.writes round-trip the file through lark's AST.
        tree = hcl2.parses(tf_path.read_text())  # type: ignore
    except UnexpectedToken as e:
        # Best-effort: an unparseable file is reported, not fatal.
        logger.warning(f"failed to parse {tf_path}: {e}")
        return "", {}
    existing_descriptions = defaultdict(list)
    new_tree = process_descriptions(
        tree,
        new_names,
        existing_descriptions,
        block_type=block_type,
    )
    new_tf = hcl2.writes(new_tree)  # type: ignore
    return new_tf, existing_descriptions
@@ -0,0 +1,25 @@
1
+ provider "mongodbatlas" {
2
+ public_key = var.public_key
3
+ private_key = var.private_key
4
+ }
5
+
6
+ module "cluster" {
7
+ source = "../../module_maintainer/v3"
8
+
9
+ cluster_name = var.cluster_name
10
+ cluster_type = var.cluster_type
11
+ mongo_db_major_version = var.mongo_db_major_version
12
+ project_id = var.project_id
13
+ replication_specs_new = var.replication_specs_new
14
+ tags = var.tags
15
+ }
16
+
17
+ output "mongodb_connection_strings" {
18
+ description = "new connection strings desc"
19
+ value = module.cluster.mongodb_connection_strings
20
+ }
21
+
22
+ output "with_desc" {
23
+ value = "with_desc"
24
+ description = "description new"
25
+ }
@@ -0,0 +1,24 @@
1
+ variable "cluster_name" {
2
+ description = "description of \"cluster\" name"
3
+ type = string
4
+ }
5
+ variable "replication_specs" {
6
+ description = "List of replication specifications in legacy mongodbatlas_cluster format"
7
+ default = []
8
+ type = list(object({
9
+ num_shards = number
10
+ zone_name = string
11
+ regions_config = set(object({
12
+ region_name = string
13
+ electable_nodes = number
14
+ priority = number
15
+ read_only_nodes = optional(number, 0)
16
+ }))
17
+ }))
18
+ }
19
+
20
+ variable "provider_name" {
21
+ description = "azure/aws/gcp"
22
+ type = string
23
+ default = ""# optional in v3
24
+ }
@@ -0,0 +1,95 @@
1
+ import pytest
2
+
3
+ from atlas_init.cli_tf.hcl.modifier import BLOCK_TYPE_OUTPUT, BLOCK_TYPE_VARIABLE, update_descriptions
4
+
5
+ example_variables_tf = """variable "cluster_name" {
6
+ type = string
7
+ }
8
+ variable "replication_specs" {
9
+ description = "List of replication specifications in legacy mongodbatlas_cluster format"
10
+ default = []
11
+ type = list(object({
12
+ num_shards = number
13
+ zone_name = string
14
+ regions_config = set(object({
15
+ region_name = string
16
+ electable_nodes = number
17
+ priority = number
18
+ read_only_nodes = optional(number, 0)
19
+ }))
20
+ }))
21
+ }
22
+
23
+ variable "provider_name" {
24
+ type = string
25
+ default = "" # optional in v3
26
+ }
27
+ """
28
+
29
+ _existing_descriptions_variables = {
30
+ "cluster_name": [""],
31
+ "provider_name": [""],
32
+ "replication_specs": ["List of replication specifications in legacy "],
33
+ }
34
+
35
+ example_outputs_tf = """provider "mongodbatlas" {
36
+ public_key = var.public_key
37
+ private_key = var.private_key
38
+ }
39
+
40
+ module "cluster" {
41
+ source = "../../module_maintainer/v3"
42
+
43
+ cluster_name = var.cluster_name
44
+ cluster_type = var.cluster_type
45
+ mongo_db_major_version = var.mongo_db_major_version
46
+ project_id = var.project_id
47
+ replication_specs_new = var.replication_specs_new
48
+ tags = var.tags
49
+ }
50
+
51
+ output "mongodb_connection_strings" {
52
+ value = module.cluster.mongodb_connection_strings
53
+ }
54
+
55
+ output "with_desc" {
56
+ value = "with_desc"
57
+ description = "description old"
58
+ }
59
+ """
60
+ _existing_descriptions_outputs = {
61
+ "mongodb_connection_strings": [""],
62
+ "with_desc": ["description old"],
63
+ }
64
+
65
+
66
+ @pytest.mark.parametrize(
67
+ ("block_type", "new_names", "existing_descriptions", "tf_config"),
68
+ [
69
+ (
70
+ BLOCK_TYPE_VARIABLE,
71
+ {
72
+ "cluster_name": 'description of "cluster" name',
73
+ "provider_name": "azure/aws/gcp",
74
+ },
75
+ _existing_descriptions_variables,
76
+ example_variables_tf,
77
+ ),
78
+ (
79
+ BLOCK_TYPE_OUTPUT,
80
+ {
81
+ "with_desc": "description new",
82
+ "mongodb_connection_strings": "new connection strings desc",
83
+ },
84
+ _existing_descriptions_outputs,
85
+ example_outputs_tf,
86
+ ),
87
+ ],
88
+ ids=[BLOCK_TYPE_VARIABLE, BLOCK_TYPE_OUTPUT],
89
+ )
90
def test_process_variables(tmp_path, file_regression, block_type, new_names, existing_descriptions, tf_config):
    """Round-trip a .tf config through update_descriptions.

    Checks the rewritten file against the regression golden file and the
    collected old descriptions against the parametrized expectation.
    """
    example_tf_path = tmp_path / "example.tf"
    example_tf_path.write_text(tf_config)
    # Bind the result to a fresh name: the original assigned it to
    # `existing_descriptions`, shadowing the expected-value parameter, so the
    # final assert compared the result with itself and could never fail.
    new_tf, found_descriptions = update_descriptions(example_tf_path, new_names, block_type=block_type)
    file_regression.check(new_tf, extension=".tf")
    assert dict(found_descriptions) == existing_descriptions
@@ -0,0 +1,29 @@
1
+ import logging
2
+ from pathlib import Path
3
+
4
+ import typer
5
+
6
+ logger = logging.getLogger(__name__)
7
SPLIT_STR = "mongodbatlas: "


def remove_prefix(line: str) -> str:
    """Drop everything up to and including the provider marker from a log line.

    Lines that do not contain the marker are returned unchanged.

    >>> remove_prefix(
    ...     "2025-02-14T15:47:14.157Z [DEBUG] provider.terraform-provider-mongodbatlas: {"
    ... )
    '{'
    """
    marker_at = line.find(SPLIT_STR)
    if marker_at == -1:
        return line
    return line[marker_at + len(SPLIT_STR):]
22
+
23
+
24
def log_clean(log_path: str = typer.Argument(..., help="Path to the log file")):
    """Strip the provider prefix from every line of a log file, in place.

    Raises FileNotFoundError when the file does not exist.  (The original
    used `assert` for this validation, which is silently stripped when
    Python runs with `-O`.)
    """
    log_path_parsed = Path(log_path)
    if not log_path_parsed.exists():
        raise FileNotFoundError(f"file not found: {log_path}")
    # NOTE: splitlines + join drops a trailing newline if the file had one.
    new_lines = [remove_prefix(line) for line in log_path_parsed.read_text().splitlines()]
    log_path_parsed.write_text("\n".join(new_lines))
    logger.info(f"cleaned log file: {log_path}")
atlas_init/repos/path.py CHANGED
@@ -16,6 +16,15 @@ _KNOWN_OWNER_PROJECTS = {
16
16
  }
17
17
 
18
18
 
19
def package_glob(package_path: str) -> str:
    """Glob matching every Go source file directly inside *package_path*."""
    return package_path + "/*.go"
21
+
22
+
23
def go_package_prefix(repo_path: Path) -> str:
    """Return the Go module prefix (github.com/<owner>/<project>) for *repo_path*."""
    return "github.com/" + owner_project_name(repo_path)
26
+
27
+
19
28
  def _owner_project_name(repo_path: Path) -> str:
20
29
  owner_project = owner_project_name(repo_path)
21
30
  if owner_project not in _KNOWN_OWNER_PROJECTS:
@@ -61,6 +70,11 @@ class Repo(StrEnum):
61
70
  TF = "tf"
62
71
 
63
72
 
73
def as_repo_alias(path: Path) -> Repo:
    """Map a repo checkout path to its Repo enum alias via the repo's owner/project."""
    return _owner_lookup(owner_project_name(path))
76
+
77
+
64
78
  _owner_repos = {
65
79
  GH_OWNER_TERRAFORM_PROVIDER_MONGODBATLAS: Repo.TF,
66
80
  GH_OWNER_MONGODBATLAS_CLOUDFORMATION_RESOURCES: Repo.CFN,
@@ -2,22 +2,22 @@ from __future__ import annotations
2
2
 
3
3
  import fnmatch
4
4
  import logging
5
+ from collections import defaultdict
5
6
  from collections.abc import Iterable
6
7
  from functools import total_ordering
7
8
  from os import getenv
8
9
  from pathlib import Path
9
10
  from typing import Any
10
11
 
11
- from model_lib import Entity, dump_ignore_falsy
12
+ from model_lib import Entity, IgnoreFalsy
12
13
  from pydantic import Field, model_validator
13
14
 
14
- from atlas_init.repos.path import owner_project_name
15
+ from atlas_init.repos.path import as_repo_alias, go_package_prefix, owner_project_name, package_glob
15
16
 
16
17
  logger = logging.getLogger(__name__)
17
18
 
18
19
 
19
- @dump_ignore_falsy
20
- class TerraformVars(Entity):
20
+ class TerraformVars(IgnoreFalsy):
21
21
  cluster_info: bool = False
22
22
  cluster_info_m10: bool = False
23
23
  stream_instance: bool = False
@@ -28,6 +28,7 @@ class TerraformVars(Entity):
28
28
  use_aws_vpc: bool = False
29
29
  use_aws_s3: bool = False
30
30
  use_federated_vars: bool = False
31
+ use_encryption_at_rest: bool = False
31
32
 
32
33
  def __add__(self, other: TerraformVars): # type: ignore
33
34
  assert isinstance(other, TerraformVars) # type: ignore
@@ -59,6 +60,8 @@ class TerraformVars(Entity):
59
60
  config["use_project_extra"] = True
60
61
  if self.use_federated_vars:
61
62
  config["use_federated_vars"] = True
63
+ if self.use_encryption_at_rest:
64
+ config["use_encryption_at_rest"] = True
62
65
  if self.stream_instance:
63
66
  # hack until backend bug with stream instance is fixed
64
67
  config["stream_instance_config"] = {"name": getenv("ATLAS_STREAM_INSTANCE_NAME", "atlas-init")}
@@ -70,15 +73,13 @@ class PyHook(Entity):
70
73
  locate: str
71
74
 
72
75
 
73
- @dump_ignore_falsy
74
76
  @total_ordering
75
- class TestSuite(Entity):
77
+ class TestSuite(IgnoreFalsy):
76
78
  __test__ = False
77
79
 
78
80
  name: str
79
81
  sequential_tests: bool = False
80
82
  repo_go_packages: dict[str, list[str]] = Field(default_factory=dict)
81
- repo_globs: dict[str, list[str]] = Field(default_factory=dict)
82
83
  vars: TerraformVars = Field(default_factory=TerraformVars) # type: ignore
83
84
  post_apply_hooks: list[PyHook] = Field(default_factory=list)
84
85
 
@@ -87,13 +88,23 @@ class TestSuite(Entity):
87
88
  raise TypeError
88
89
  return self.name < other.name
89
90
 
90
- def all_globs(self, repo_alias: str) -> list[str]:
91
- go_packages = self.repo_go_packages.get(repo_alias, [])
92
- return self.repo_globs.get(repo_alias, []) + [f"{pkg}/*.go" for pkg in go_packages] + go_packages
91
    def package_url_tests(self, repo_path: Path, prefix: str = "") -> dict[str, dict[str, Path]]:
        """Map each configured Go package URL to the functions defined in it.

        Scans the top-level ``*.go`` files of every package listed for this
        repo's alias and collects functions whose names start with *prefix*
        (pass e.g. "Test" to collect only test functions; "" matches all).
        Returns {package_url: {func_name: directory_containing_the_file}}.
        """
        alias = as_repo_alias(repo_path)
        packages = self.repo_go_packages.get(alias, [])
        names = defaultdict(dict)
        for package in packages:
            pkg_name = f"{go_package_prefix(repo_path)}/{package}"
            # Non-recursive: only files directly in the package directory.
            for go_file in repo_path.glob(f"{package}/*.go"):
                with go_file.open() as f:
                    for line in f:
                        if line.startswith(f"func {prefix}"):
                            # "func TestFoo(t *testing.T) {" -> "TestFoo"
                            test_name = line.split("(")[0].strip().removeprefix("func ")
                            names[pkg_name][test_name] = go_file.parent
        return names
93
104
 
94
105
    def is_active(self, repo_alias: str, change_paths: Iterable[str]) -> bool:
        """changes paths should be relative to the repo"""
        # The suite is active when any changed path matches the *.go glob of
        # any Go package configured for this repo alias.
        globs = [package_glob(pkg) for pkg in self.repo_go_packages.get(repo_alias, [])]
        return any(any(fnmatch.fnmatch(path, glob) for glob in globs) for path in change_paths)
98
109
 
99
110
  def cwd_is_repo_go_pkg(self, cwd: Path, repo_alias: str) -> bool:
@@ -145,9 +156,9 @@ class AtlasInitConfig(Entity):
145
156
  @model_validator(mode="after")
146
157
  def ensure_all_repo_aliases_are_found(self):
147
158
  missing_aliases = set()
148
- aliases = set(self.repo_aliases.keys())
159
+ aliases = set(self.repo_aliases.values())
149
160
  for group in self.test_suites:
150
- if more_missing := group.repo_globs.keys() - aliases:
161
+ if more_missing := (group.repo_go_packages.keys() - aliases):
151
162
  logger.warning(f"repo aliases not found for group={group.name}: {more_missing}")
152
163
  missing_aliases |= more_missing
153
164
  if missing_aliases:
@@ -36,7 +36,7 @@ def hide_secrets(handler: logging.Handler, secrets_dict: dict[str, str]) -> None
36
36
  if not isinstance(value, str):
37
37
  continue
38
38
  key_lower = key.lower()
39
- if key_lower in {"true", "false"} or value.lower() in {"true", "false"}:
39
+ if key_lower in {"true", "false"} or value.lower() in {"true", "false"} or value.isdigit():
40
40
  continue
41
41
  if any(safe in key_lower for safe in safe_keys):
42
42
  continue
@@ -2,25 +2,25 @@
2
2
  # Manual edits may be lost in future updates.
3
3
 
4
4
  provider "registry.terraform.io/hashicorp/aws" {
5
- version = "5.67.0"
5
+ version = "5.87.0"
6
6
  constraints = "~> 5.0"
7
7
  hashes = [
8
- "h1:8wkuQvQiqjjm2+gQepy6xFBfimGoesKz1BPcVKWvED8=",
9
- "zh:1259c8106c0a3fc0ed3b3eb814ab88d6a672e678b533f47d1bbbe3107949f43e",
10
- "zh:226414049afd6d334cc16ff5d6cef23683620a9b56da67a21422a113d9cce4ab",
11
- "zh:3c89b103aea20ef82a84e889abaeb971cb168de8292b61b34b83e807c40085a9",
12
- "zh:3dd88e994fb7d7a6c6eafd3c01393274e4f776021176acea2e980f73fbd4acbc",
13
- "zh:487e0dda221c84a20a143904c1cee4e63fce6c5c57c21368ea79beee87b108da",
14
- "zh:7693bdcec8181aafcbda2c41c35b1386997e2c92b6f011df058009e4c8b300e1",
15
- "zh:82679536250420f9e8e6edfd0fa9a1bab99a7f31fe5f049ac7a2e0d8c287b56f",
16
- "zh:8685218dae921740083820c52afa66cdf14cf130539da1efd7d9a78bfb6ade64",
8
+ "h1:IYq3by7O/eJuXzJwOF920z2nZEkw08PkDFdw2xkyhrs=",
9
+ "zh:017f237466875c919330b9e214fb33af14fffbff830d3755e8976d8fa3c963c2",
10
+ "zh:0776d1e60aa93c85ecbb01144aed2789c8e180bb0f1c811a0aba17ca7247b26c",
11
+ "zh:0dfa5c6cfb3724494fdc73f7d042515e88a20da8968959f48b3ec0b937bd8c8f",
12
+ "zh:1707a5ead36a7980cb3f83e8b69a67a14ae725bfc990ddfcc209b59400b57b04",
13
+ "zh:1c71f54fdd6adcbe547d6577dbb843d72a30fef0ab882d0afbeb8a7b348bc442",
14
+ "zh:3563c850a29790957ec3f4d3ba203bfa2e084ac7319035b3f43b91f818a2c9b4",
15
+ "zh:520bf6cef53785a92226651d5bebacbbf9314bdbc3211d0bf0903bce4e45149d",
16
+ "zh:56f9778575830f6e5c23462c2eccbf2c9afaddb00a69275fcfb33cd1a6d17f4d",
17
+ "zh:73e381cb0b1e76d471d7b0952f3d2a80350b507d15bda9b7041ea69077e3b5b5",
18
+ "zh:7da74b48f8fa088be758a92407980400cb4b039a8d9ba3c108907e4055e9ad6f",
19
+ "zh:8dacfa9623ba2e0197fe7db6faaaa0820a3b91fe00ba9e5d8a646340522bc8dd",
17
20
  "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
18
- "zh:9e553a3ec05eedea779d393447fc316689ba6c4d4d8d569b986898e6dbe58fee",
19
- "zh:a36c24acd3c75bac8211fefde58c459778021eb871ff8339be1c26ad8fd67ee1",
20
- "zh:ce48bd1e35d6f996f1a09d8f99e8084469b7fec5611e67a50a63e96375b87ebe",
21
- "zh:d6c76a24205513725269e4783da14be9648e9086fb621496052f4b37d52d785e",
22
- "zh:d95a31745affb178ea48fa8e0be94691a8f7507ea55c0d0a4b6e0a8ef6fcb929",
23
- "zh:f061ce59fac1bc425c1092e6647ed4bb1b61824416041b46dbf336e01a63ad89",
21
+ "zh:9c2ebd21d697e1a611fe201788dc9e1678949a088afc85d4589563bca484d835",
22
+ "zh:ac5d0bbf36f9a6cedbfb63993f6baf0aabdaf21c8d7fc3b1e69ba8cbf344b5f3",
23
+ "zh:c2329644179f78a0458b6cf2dd5eaadca4c610fc3577a1b50620544d92df13e8",
24
24
  ]
25
25
  }
26
26
 
atlas_init/tf/main.tf CHANGED
@@ -5,7 +5,8 @@ locals {
5
5
  Owner = "terraform-atlas-init"
6
6
  }
7
7
  use_aws_vpc = var.use_private_link || var.use_vpc_peering || var.use_aws_vpc
8
- use_cloud_provider = var.use_aws_s3
8
+ use_aws_kms = var.use_encryption_at_rest
9
+ use_cloud_provider = var.use_aws_s3 || var.use_encryption_at_rest
9
10
  # https://www.mongodb.com/docs/atlas/reference/amazon-aws/
10
11
  atlas_region = replace(upper(var.aws_region), "-", "_")
11
12
  use_cluster = var.cluster_config.name != ""
@@ -133,6 +134,18 @@ module "aws_s3" {
133
134
  iam_role_name = module.cloud_provider[0].iam_role_name
134
135
  }
135
136
 
137
+ module "aws_kms" {
138
+ source = "./modules/aws_kms"
139
+ count = local.use_aws_kms ? 1 : 0
140
+
141
+ access_iam_role_arns = {
142
+ atlas = module.cloud_provider[0].iam_role_arn
143
+ }
144
+ aws_account_id = local.aws_account_id
145
+ aws_region = var.aws_region
146
+ key_suffix = var.project_name
147
+ }
148
+
136
149
  module "federated_vars" {
137
150
  source = "./modules/federated_vars"
138
151
  count = var.use_federated_vars ? 1 : 0
@@ -142,3 +155,14 @@ module "federated_vars" {
142
155
  project_id = local.project_id
143
156
  base_url = var.atlas_base_url
144
157
  }
158
+
159
+ module "encryption_at_rest" {
160
+ source = "./modules/encryption_at_rest"
161
+ count = var.use_encryption_at_rest ? 1 : 0
162
+
163
+ project_id = local.project_id
164
+ atlas_role_id = module.cloud_provider[0].atlas_role_id
165
+ kms_key_id = module.aws_kms[0].kms_key_id
166
+ atlas_regions = [local.atlas_region]
167
+
168
+ }
@@ -0,0 +1,100 @@
1
# KMS key module for atlas-init: creates a multi-region key whose policy
# grants access to the AWS account plus a set of named IAM roles, and attaches
# an inline policy to each of those roles so they can use the key.

variable "aws_account_id" {
  type = string
}
variable "aws_region" {
  type = string
}
variable "access_iam_role_arns" {
  type        = map(string)
  description = "static name to arn"
}

variable "key_suffix" {
  type = string
}

locals {
  # Limits key usage to calls made through Secrets Manager in this account;
  # currently referenced only by the commented-out example statement below.
  kms_secretsmanager_condition = {
    StringEquals = {
      "kms:CallerAccount" = var.aws_account_id
      "kms:ViaService"    = "secretsmanager.${var.aws_region}.amazonaws.com"
    }
  }
  # Re-keys the map by the role *name* (last path segment of each ARN).
  role_names = { for static_name, role_arn in var.access_iam_role_arns : split("/", role_arn)[length(split("/", role_arn)) - 1] => role_arn }
  kms_key_policy_statements = [
    {
      Sid    = "Enable IAM User Permissions Current AWS Account",
      Effect = "Allow",
      Principal = {
        AWS = var.aws_account_id
      },
      Action   = "kms:*",
      Resource = "*"
    },
    # { useful to check our example guide
    #   "Sid" : "Allow access through AWS Secrets Manager for all principals in the account that are authorized to use AWS Secrets Manager",
    #   "Effect" : "Allow",
    #   # "Principal" : { "AWS" : [aws_iam_role.execution_role.arn] },
    #   "Principal" : { "AWS" : "*" },
    #   "Action" : [
    #     "kms:Decrypt",
    #   ],
    #   "Resource" : "*",
    #   "Condition" : local.kms_secretsmanager_condition
    # },
  ]

  # One statement per role: Principal is "*" but the Condition pins the
  # caller's PrincipalArn to exactly that role's ARN.
  access_roles = [for role_name, role_arn in local.role_names :
    {
      Sid    = "Enable IAM Permissions for Role ${role_name}",
      Effect = "Allow",
      Principal = {
        AWS = "*"
      }
      Action   = "kms:*",
      Resource = "*"
      Condition = {
        StringEquals = {
          "aws:PrincipalArn" = role_arn
        }
      }
    }
  ]
  kms_key_policy_json = jsonencode({
    Version   = "2012-10-17",
    Statement = concat(local.kms_key_policy_statements, local.access_roles)
  })
}
resource "aws_kms_key" "this" {
  description             = "KMS key for atlas-init ${var.key_suffix}"
  deletion_window_in_days = 7
  multi_region            = true
  policy                  = local.kms_key_policy_json
}

# Inline policy on each caller role granting it full use of this key.
resource "aws_iam_role_policy" "kms_access" {
  for_each = var.access_iam_role_arns
  name     = "atlas-init-${each.key}-kms-access"
  # Role name is the last path segment of the role ARN.
  role = split("/", each.value)[length(split("/", each.value)) - 1]

  policy = <<-EOF
  {
    "Version": "2012-10-17",
    "Statement": [
      {
        "Effect": "Allow",
        "Action": [
          "kms:*"
        ],
        "Resource": [
          "${aws_kms_key.this.arn}"
        ]
      }
    ]
  }
  EOF
}

output "kms_key_id" {
  value = aws_kms_key.this.id
}
@@ -0,0 +1,7 @@
1
# Provider requirements for the aws_kms module: only the hashicorp/aws
# provider (version constraint inherited from the root module).
terraform {
  required_providers {
    aws = {
      source = "hashicorp/aws"
    }
  }
}
@@ -53,7 +53,14 @@ output "env_vars" {
53
53
  }
54
54
  }
55
55
 
56
-
57
56
  output "iam_role_name" {
58
57
  value = aws_iam_role.aws_role.name
58
+ }
59
+
60
+ output "atlas_role_id" {
61
+ value = mongodbatlas_cloud_provider_access_authorization.auth_role.role_id
62
+ }
63
+
64
+ output "iam_role_arn" {
65
+ value = aws_iam_role.aws_role.arn
59
66
  }