atlas-init 0.4.1__py3-none-any.whl → 0.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atlas_init/__init__.py +4 -7
- atlas_init/atlas_init.yaml +3 -0
- atlas_init/cli_helper/go.py +103 -57
- atlas_init/cli_root/go_test.py +13 -10
- atlas_init/cli_tf/app.py +4 -0
- atlas_init/cli_tf/debug_logs.py +4 -4
- atlas_init/cli_tf/example_update.py +142 -0
- atlas_init/cli_tf/example_update_test/test_update_example.tf +23 -0
- atlas_init/cli_tf/example_update_test.py +96 -0
- atlas_init/cli_tf/github_logs.py +4 -1
- atlas_init/cli_tf/go_test_run.py +23 -0
- atlas_init/cli_tf/go_test_summary.py +7 -1
- atlas_init/cli_tf/hcl/modifier.py +144 -0
- atlas_init/cli_tf/hcl/modifier_test/test_process_variables_output_.tf +25 -0
- atlas_init/cli_tf/hcl/modifier_test/test_process_variables_variable_.tf +24 -0
- atlas_init/cli_tf/hcl/modifier_test.py +95 -0
- atlas_init/cli_tf/log_clean.py +29 -0
- atlas_init/cli_tf/mock_tf_log.py +1 -1
- atlas_init/cli_tf/schema_v2.py +2 -2
- atlas_init/cli_tf/schema_v2_api_parsing.py +3 -3
- atlas_init/repos/path.py +14 -0
- atlas_init/settings/config.py +24 -13
- atlas_init/settings/path.py +1 -1
- atlas_init/settings/rich_utils.py +1 -1
- atlas_init/tf/.terraform.lock.hcl +16 -16
- atlas_init/tf/main.tf +25 -1
- atlas_init/tf/modules/aws_kms/aws_kms.tf +100 -0
- atlas_init/tf/modules/aws_kms/provider.tf +7 -0
- atlas_init/tf/modules/cloud_provider/cloud_provider.tf +8 -1
- atlas_init/tf/modules/encryption_at_rest/main.tf +29 -0
- atlas_init/tf/modules/encryption_at_rest/provider.tf +9 -0
- atlas_init/tf/variables.tf +5 -0
- {atlas_init-0.4.1.dist-info → atlas_init-0.4.3.dist-info}/METADATA +12 -9
- {atlas_init-0.4.1.dist-info → atlas_init-0.4.3.dist-info}/RECORD +37 -24
- atlas_init-0.4.3.dist-info/licenses/LICENSE +21 -0
- {atlas_init-0.4.1.dist-info → atlas_init-0.4.3.dist-info}/WHEEL +0 -0
- {atlas_init-0.4.1.dist-info → atlas_init-0.4.3.dist-info}/entry_points.txt +0 -0
atlas_init/cli_tf/go_test_run.py
CHANGED
@@ -55,6 +55,25 @@ class GoTestContext(Entity):
         # return cls(name=name, steps=steps)
 
 
+def extract_group_name(log_path: Path | None) -> str:
+    """
+    >>> extract_group_name(
+    ...     Path(
+    ...         "40216340925_tests-1.11.x-latest_tests-1.11.x-latest-false_search_deployment.txt"
+    ...     )
+    ... )
+    'search_deployment'
+    >>> extract_group_name(None)
+    ''
+    """
+    if log_path is None:
+        return ""
+    if "-" not in log_path.name:
+        return ""
+    last_part = log_path.stem.split("-")[-1]
+    return "_".join(last_part.split("_")[1:]) if "_" in last_part else last_part
+
+
 @total_ordering
 class GoTestRun(Entity):
     name: str
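The doctest above pins the behavior; for readers skimming the diff, here is the same parsing traced step by step (plain standard-library Python, using the filename from the doctest):

    from pathlib import Path

    stem = Path("40216340925_tests-1.11.x-latest_tests-1.11.x-latest-false_search_deployment.txt").stem
    last_part = stem.split("-")[-1]              # 'false_search_deployment'
    group = "_".join(last_part.split("_")[1:])   # drop the leading 'false' token
    print(group)                                 # -> search_deployment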
@@ -112,6 +131,10 @@ class GoTestRun(Entity):
     def is_pass(self) -> bool:
         return self.status == GoTestStatus.PASS
 
+    @property
+    def group_name(self) -> str:
+        return extract_group_name(self.log_path)
+
     def add_line_match(self, match: LineMatch, line: str, line_number: int) -> None:
         self.run_seconds = match.run_seconds or self.run_seconds
         self.finish_line = LineInfo(number=line_number, text=line)
atlas_init/cli_tf/go_test_summary.py
CHANGED
@@ -43,6 +43,10 @@ class GoTestSummary(Entity):
     def success_rate_human(self) -> str:
         return f"{self.success_rate:.2%}"
 
+    @property
+    def group_name(self) -> str:
+        return next((r.group_name for r in self.results if r.group_name), "unknown-group")
+
     def last_pass_human(self) -> str:
         return next(
             (f"Passed {test.when}" for test in reversed(self.results) if test.status == GoTestStatus.PASS),
@@ -124,7 +128,9 @@ def create_detailed_summary(
         test_summary_path = summary_dir_path / f"{summary.success_rate_human}_{summary.name}.md"
         test_summary_md = summary_str(summary, start_test_date, end_test_date)
         file_utils.ensure_parents_write_text(test_summary_path, test_summary_md)
-        top_level_summary.append(
+        top_level_summary.append(
+            f"- {summary.name} - {summary.group_name} ({summary.success_rate_human}) ({summary.last_pass_human()}) ('{test_summary_path}')"
+        )
     return top_level_summary
 
 
atlas_init/cli_tf/hcl/modifier.py
ADDED
@@ -0,0 +1,144 @@
+import logging
+from collections import defaultdict
+from copy import deepcopy
+from pathlib import Path
+
+import hcl2
+from lark import Token, Tree, UnexpectedToken
+
+logger = logging.getLogger(__name__)
+
+BLOCK_TYPE_VARIABLE = "variable"
+BLOCK_TYPE_OUTPUT = "output"
+
+
+def process_token(node: Token, indent=0):
+    logger.debug(f"[{indent}] (token)\t|", " " * indent, node.type, node.value)
+    return deepcopy(node)
+
+
+def is_identifier_block_type(tree: Tree | Token, block_type: str) -> bool:
+    if not isinstance(tree, Tree):
+        return False
+    try:
+        return tree.children[0].value == block_type  # type: ignore
+    except (IndexError, AttributeError):
+        return False
+
+
+def is_block_type(tree: Tree, block_type: str) -> bool:
+    try:
+        return tree.data == "block" and is_identifier_block_type(tree.children[0], block_type)
+    except (IndexError, AttributeError):
+        return False
+
+
+def update_description(tree: Tree, new_descriptions: dict[str, str], existing_names: dict[str, list[str]]) -> Tree:
+    new_children = tree.children.copy()
+    variable_body = new_children[2]
+    assert variable_body.data == "body"
+    name = token_name(new_children[1])
+    old_description = read_description_attribute(variable_body)
+    existing_names[name].append(old_description)
+    new_description = new_descriptions.get(name, "")
+    if not new_description:
+        logger.debug(f"no description found for variable {name}")
+        return tree
+    new_children[2] = update_body_with_description(variable_body, new_description)
+    return Tree(tree.data, new_children)
+
+
+def token_name(token: Token | Tree) -> str:
+    if isinstance(token, Token):
+        return token.value.strip('"')
+    err_msg = f"unexpected token type {type(token)} for token name"
+    raise ValueError(err_msg)
+
+
+def has_attribute_description(maybe_attribute: Token | Tree) -> bool:
+    if not isinstance(maybe_attribute, Tree):
+        return False
+    return maybe_attribute.data == "attribute" and maybe_attribute.children[0].children[0].value == "description"  # type: ignore
+
+
+def update_body_with_description(tree: Tree, new_description: str) -> Tree:
+    new_description = new_description.replace('"', '\\"')
+    new_children = tree.children.copy()
+    found_description = False
+    for i, maybe_attribute in enumerate(new_children):
+        if has_attribute_description(maybe_attribute):
+            found_description = True
+            new_children[i] = create_description_attribute(new_description)
+    if not found_description:
+        new_children.insert(0, new_line())
+        new_children.insert(1, create_description_attribute(new_description))
+    return Tree(tree.data, new_children)
+
+
+def new_line() -> Tree:
+    return Tree(
+        Token("RULE", "new_line_or_comment"),
+        [Token("NL_OR_COMMENT", "\n ")],
+    )
+
+
+def read_description_attribute(tree: Tree) -> str:
+    return next(
+        (
+            token_name(maybe_attribute.children[-1].children[0])
+            for maybe_attribute in tree.children
+            if has_attribute_description(maybe_attribute)
+        ),
+        "",
+    )
+
+
+def create_description_attribute(description_value: str) -> Tree:
+    children = [
+        Tree(Token("RULE", "identifier"), [Token("NAME", "description")]),
+        Token("EQ", " ="),
+        Tree(Token("RULE", "expr_term"), [Token("STRING_LIT", f'"{description_value}"')]),
+    ]
+    return Tree(Token("RULE", "attribute"), children)
+
+
+def process_descriptions(
+    node: Tree,
+    name_updates: dict[str, str],
+    existing_names: dict[str, list[str]],
+    depth=0,
+    *,
+    block_type: str,
+) -> Tree:
+    new_children = []
+    logger.debug(f"[{depth}] (tree)\t|", " " * depth, node.data)
+    for child in node.children:
+        if isinstance(child, Tree):
+            if is_block_type(child, block_type):
+                child = update_description(  # noqa: PLW2901
+                    child, name_updates, existing_names
+                )
+            new_children.append(
+                process_descriptions(child, name_updates, existing_names, depth + 1, block_type=block_type)
+            )
+        else:
+            new_children.append(process_token(child, depth + 1))
+
+    return Tree(node.data, new_children)
+
+
+def update_descriptions(tf_path: Path, new_names: dict[str, str], block_type: str) -> tuple[str, dict[str, list[str]]]:
+    try:
+        tree = hcl2.parses(tf_path.read_text())  # type: ignore
+    except UnexpectedToken as e:
+        logger.warning(f"failed to parse {tf_path}: {e}")
+        return "", {}
+    existing_descriptions = defaultdict(list)
+    new_tree = process_descriptions(
+        tree,
+        new_names,
+        existing_descriptions,
+        block_type=block_type,
+    )
+    new_tf = hcl2.writes(new_tree)  # type: ignore
+    return new_tf, existing_descriptions
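A minimal usage sketch of the new module (the file name and description text here are invented for illustration; it assumes python-hcl2 and lark are installed, as the imports above require):

    from pathlib import Path

    from atlas_init.cli_tf.hcl.modifier import BLOCK_TYPE_VARIABLE, update_descriptions

    tf_file = Path("variables.tf")  # hypothetical file
    new_tf, old_descriptions = update_descriptions(
        tf_file,
        {"cluster_name": "Name of the Atlas cluster"},
        block_type=BLOCK_TYPE_VARIABLE,
    )
    if new_tf:  # an empty string means the file failed to parse
        tf_file.write_text(new_tf)
    # old_descriptions maps each block name to the descriptions it had before,
    # e.g. {"cluster_name": [""]} when no description attribute existed.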
atlas_init/cli_tf/hcl/modifier_test/test_process_variables_output_.tf
ADDED
@@ -0,0 +1,25 @@
+provider "mongodbatlas" {
+  public_key = var.public_key
+  private_key = var.private_key
+}
+
+module "cluster" {
+  source = "../../module_maintainer/v3"
+
+  cluster_name = var.cluster_name
+  cluster_type = var.cluster_type
+  mongo_db_major_version = var.mongo_db_major_version
+  project_id = var.project_id
+  replication_specs_new = var.replication_specs_new
+  tags = var.tags
+}
+
+output "mongodb_connection_strings" {
+  description = "new connection strings desc"
+  value = module.cluster.mongodb_connection_strings
+}
+
+output "with_desc" {
+  value = "with_desc"
+  description = "description new"
+}
atlas_init/cli_tf/hcl/modifier_test/test_process_variables_variable_.tf
ADDED
@@ -0,0 +1,24 @@
+variable "cluster_name" {
+  description = "description of \"cluster\" name"
+  type = string
+}
+variable "replication_specs" {
+  description = "List of replication specifications in legacy mongodbatlas_cluster format"
+  default = []
+  type = list(object({
+    num_shards = number
+    zone_name = string
+    regions_config = set(object({
+      region_name = string
+      electable_nodes = number
+      priority = number
+      read_only_nodes = optional(number, 0)
+    }))
+  }))
+}
+
+variable "provider_name" {
+  description = "azure/aws/gcp"
+  type = string
+  default = ""# optional in v3
+}
atlas_init/cli_tf/hcl/modifier_test.py
ADDED
@@ -0,0 +1,95 @@
+import pytest
+
+from atlas_init.cli_tf.hcl.modifier import BLOCK_TYPE_OUTPUT, BLOCK_TYPE_VARIABLE, update_descriptions
+
+example_variables_tf = """variable "cluster_name" {
+  type = string
+}
+variable "replication_specs" {
+  description = "List of replication specifications in legacy mongodbatlas_cluster format"
+  default = []
+  type = list(object({
+    num_shards = number
+    zone_name = string
+    regions_config = set(object({
+      region_name = string
+      electable_nodes = number
+      priority = number
+      read_only_nodes = optional(number, 0)
+    }))
+  }))
+}
+
+variable "provider_name" {
+  type = string
+  default = "" # optional in v3
+}
+"""
+
+_existing_descriptions_variables = {
+    "cluster_name": [""],
+    "provider_name": [""],
+    "replication_specs": ["List of replication specifications in legacy mongodbatlas_cluster format"],
+}
+
+example_outputs_tf = """provider "mongodbatlas" {
+  public_key = var.public_key
+  private_key = var.private_key
+}
+
+module "cluster" {
+  source = "../../module_maintainer/v3"
+
+  cluster_name = var.cluster_name
+  cluster_type = var.cluster_type
+  mongo_db_major_version = var.mongo_db_major_version
+  project_id = var.project_id
+  replication_specs_new = var.replication_specs_new
+  tags = var.tags
+}
+
+output "mongodb_connection_strings" {
+  value = module.cluster.mongodb_connection_strings
+}
+
+output "with_desc" {
+  value = "with_desc"
+  description = "description old"
+}
+"""
+_existing_descriptions_outputs = {
+    "mongodb_connection_strings": [""],
+    "with_desc": ["description old"],
+}
+
+
+@pytest.mark.parametrize(
+    ("block_type", "new_names", "existing_descriptions", "tf_config"),
+    [
+        (
+            BLOCK_TYPE_VARIABLE,
+            {
+                "cluster_name": 'description of "cluster" name',
+                "provider_name": "azure/aws/gcp",
+            },
+            _existing_descriptions_variables,
+            example_variables_tf,
+        ),
+        (
+            BLOCK_TYPE_OUTPUT,
+            {
+                "with_desc": "description new",
+                "mongodb_connection_strings": "new connection strings desc",
+            },
+            _existing_descriptions_outputs,
+            example_outputs_tf,
+        ),
+    ],
+    ids=[BLOCK_TYPE_VARIABLE, BLOCK_TYPE_OUTPUT],
+)
+def test_process_variables(tmp_path, file_regression, block_type, new_names, existing_descriptions, tf_config):
+    example_tf_path = tmp_path / "example.tf"
+    example_tf_path.write_text(tf_config)
+    new_tf, existing_descriptions = update_descriptions(example_tf_path, new_names, block_type=block_type)
+    file_regression.check(new_tf, extension=".tf")
+    assert dict(existing_descriptions.items()) == existing_descriptions
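The file_regression fixture used above comes from the pytest-regressions plugin: on the first run (or with pytest's --force-regen flag) it writes the expected output file next to the test, which is where the two ADDED .tf fixture files above come from.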
atlas_init/cli_tf/log_clean.py
ADDED
@@ -0,0 +1,29 @@
+import logging
+from pathlib import Path
+
+import typer
+
+logger = logging.getLogger(__name__)
+SPLIT_STR = "mongodbatlas: "
+
+
+def remove_prefix(line: str) -> str:
+    """
+    >>> remove_prefix(
+    ...     "2025-02-14T15:47:14.157Z [DEBUG] provider.terraform-provider-mongodbatlas: {"
+    ... )
+    '{'
+    >>> remove_prefix(
+    ...     '2025-02-14T15:47:14.158Z [DEBUG] provider.terraform-provider-mongodbatlas: "biConnector": {'
+    ... )
+    ' "biConnector": {'
+    """
+    return line if SPLIT_STR not in line else line.split(SPLIT_STR, 1)[1]
+
+
+def log_clean(log_path: str = typer.Argument(..., help="Path to the log file")):
+    log_path_parsed = Path(log_path)
+    assert log_path_parsed.exists(), f"file not found: {log_path}"
+    new_lines = [remove_prefix(line) for line in log_path_parsed.read_text().splitlines()]
+    log_path_parsed.write_text("\n".join(new_lines))
+    logger.info(f"cleaned log file: {log_path}")
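A quick sketch of the helper in use (the log lines are invented for illustration):

    from atlas_init.cli_tf.log_clean import remove_prefix

    line = "2025-02-14T15:47:14.157Z [DEBUG] provider.terraform-provider-mongodbatlas: {"
    print(remove_prefix(line))          # -> {
    print(remove_prefix("plain line"))  # lines without the marker pass through unchanged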
atlas_init/cli_tf/mock_tf_log.py
CHANGED
@@ -149,7 +149,7 @@ def is_cache_up_to_date(cache_path: Path, cache_ttl: int) -> bool:
     if cache_path.exists():
         modified_ts = file_utils.file_modified_time(cache_path)
         if modified_ts > time.time() - cache_ttl:
-            logger.info(f"using cached admin api: {cache_path} downloaded {time.time()-modified_ts:.0f}s ago")
+            logger.info(f"using cached admin api: {cache_path} downloaded {time.time() - modified_ts:.0f}s ago")
             return True
     return False
 
atlas_init/cli_tf/schema_v2.py
CHANGED
@@ -466,8 +466,8 @@ def generate_go_attribute_schema_lines(
     attr_name = attr.tf_name
     lines = [indent(line_indent, f'"{attr_name}": {attribute_header(attr)}{{')]
     if desc := attr.description or attr.is_nested and (desc := schema.ref_resource(attr.schema_ref).description):
-        lines.append(indent(line_indent + 1, f'Description: "{desc.replace(
-        lines.append(indent(line_indent + 1, f'MarkdownDescription: "{desc.replace(
+        lines.append(indent(line_indent + 1, f'Description: "{desc.replace("\n", "\\n")}",'))
+        lines.append(indent(line_indent + 1, f'MarkdownDescription: "{desc.replace("\n", "\\n")}",'))
     if attr.is_required:
         lines.append(indent(line_indent + 1, "Required: true,"))
     if attr.is_optional:
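Worth noting: the rewritten one-liners reuse double quotes and a "\n" escape inside the f-string replacement field, which is only legal from Python 3.12 onward (PEP 701). A minimal reproduction:

    desc = "first line\nsecond line"
    print(f'Description: "{desc.replace("\n", "\\n")}",')
    # Python 3.12+: Description: "first line\nsecond line",
    # Python <= 3.11: SyntaxError (backslash and reused quotes inside an f-string expression)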
atlas_init/cli_tf/schema_v2_api_parsing.py
CHANGED
@@ -32,9 +32,9 @@ def api_spec_text_changes(schema: SchemaV2, api_spec_parsed: OpenapiSchema) -> OpenapiSchema:
         if name.startswith(prefix):
             schema_to_update.pop(name)
             name_no_prefix = name.removeprefix(prefix)
-            assert (
-
-            )
+            assert name_no_prefix not in schema_to_update, (
+                f"removed {prefix} from {name} in schema but {name_no_prefix} already exists"
+            )
             schema_to_update[name_no_prefix] = value
     openapi_yaml = dump(api_spec_parsed, "yaml")
     for prefix in openapi_changes.schema_prefix_removal:
atlas_init/repos/path.py
CHANGED
@@ -16,6 +16,15 @@ _KNOWN_OWNER_PROJECTS = {
 }
 
 
+def package_glob(package_path: str) -> str:
+    return f"{package_path}/*.go"
+
+
+def go_package_prefix(repo_path: Path) -> str:
+    owner_project = owner_project_name(repo_path)
+    return f"github.com/{owner_project}"
+
+
 def _owner_project_name(repo_path: Path) -> str:
     owner_project = owner_project_name(repo_path)
     if owner_project not in _KNOWN_OWNER_PROJECTS:
@@ -61,6 +70,11 @@ class Repo(StrEnum):
     TF = "tf"
 
 
+def as_repo_alias(path: Path) -> Repo:
+    owner = owner_project_name(path)
+    return _owner_lookup(owner)
+
+
 _owner_repos = {
     GH_OWNER_TERRAFORM_PROVIDER_MONGODBATLAS: Repo.TF,
     GH_OWNER_MONGODBATLAS_CLOUDFORMATION_RESOURCES: Repo.CFN,
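A sketch of what the new helpers return (the repo path is hypothetical; go_package_prefix derives owner/project from the checkout via owner_project_name, so it must point at a real clone):

    from pathlib import Path

    from atlas_init.repos.path import go_package_prefix, package_glob

    print(package_glob("internal/service/searchdeployment"))
    # -> internal/service/searchdeployment/*.go

    repo = Path.home() / "code" / "terraform-provider-mongodbatlas"  # hypothetical clone
    print(go_package_prefix(repo))
    # -> github.com/mongodb/terraform-provider-mongodbatlas (for that checkout)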
atlas_init/settings/config.py
CHANGED
@@ -2,22 +2,22 @@ from __future__ import annotations
 
 import fnmatch
 import logging
+from collections import defaultdict
 from collections.abc import Iterable
 from functools import total_ordering
 from os import getenv
 from pathlib import Path
 from typing import Any
 
-from model_lib import Entity,
+from model_lib import Entity, IgnoreFalsy
 from pydantic import Field, model_validator
 
-from atlas_init.repos.path import owner_project_name
+from atlas_init.repos.path import as_repo_alias, go_package_prefix, owner_project_name, package_glob
 
 logger = logging.getLogger(__name__)
 
 
-
-class TerraformVars(Entity):
+class TerraformVars(IgnoreFalsy):
     cluster_info: bool = False
     cluster_info_m10: bool = False
     stream_instance: bool = False
@@ -28,6 +28,7 @@ class TerraformVars(Entity):
     use_aws_vpc: bool = False
     use_aws_s3: bool = False
     use_federated_vars: bool = False
+    use_encryption_at_rest: bool = False
 
     def __add__(self, other: TerraformVars):  # type: ignore
         assert isinstance(other, TerraformVars)  # type: ignore
@@ -59,6 +60,8 @@ class TerraformVars(Entity):
             config["use_project_extra"] = True
         if self.use_federated_vars:
             config["use_federated_vars"] = True
+        if self.use_encryption_at_rest:
+            config["use_encryption_at_rest"] = True
         if self.stream_instance:
             # hack until backend bug with stream instance is fixed
             config["stream_instance_config"] = {"name": getenv("ATLAS_STREAM_INSTANCE_NAME", "atlas-init")}
@@ -70,15 +73,13 @@ class PyHook(Entity):
     locate: str
 
 
-@dump_ignore_falsy
 @total_ordering
-class TestSuite(
+class TestSuite(IgnoreFalsy):
     __test__ = False
 
     name: str
     sequential_tests: bool = False
     repo_go_packages: dict[str, list[str]] = Field(default_factory=dict)
-    repo_globs: dict[str, list[str]] = Field(default_factory=dict)
     vars: TerraformVars = Field(default_factory=TerraformVars)  # type: ignore
     post_apply_hooks: list[PyHook] = Field(default_factory=list)
 
@@ -87,13 +88,23 @@ class TestSuite(Entity):
             raise TypeError
         return self.name < other.name
 
-    def
-
-
+    def package_url_tests(self, repo_path: Path, prefix: str = "") -> dict[str, dict[str, Path]]:
+        alias = as_repo_alias(repo_path)
+        packages = self.repo_go_packages.get(alias, [])
+        names = defaultdict(dict)
+        for package in packages:
+            pkg_name = f"{go_package_prefix(repo_path)}/{package}"
+            for go_file in repo_path.glob(f"{package}/*.go"):
+                with go_file.open() as f:
+                    for line in f:
+                        if line.startswith(f"func {prefix}"):
+                            test_name = line.split("(")[0].strip().removeprefix("func ")
+                            names[pkg_name][test_name] = go_file.parent
+        return names
 
     def is_active(self, repo_alias: str, change_paths: Iterable[str]) -> bool:
         """changes paths should be relative to the repo"""
-        globs = self.
+        globs = [package_glob(pkg) for pkg in self.repo_go_packages.get(repo_alias, [])]
         return any(any(fnmatch.fnmatch(path, glob) for glob in globs) for path in change_paths)
 
     def cwd_is_repo_go_pkg(self, cwd: Path, repo_alias: str) -> bool:
@@ -145,9 +156,9 @@ class AtlasInitConfig(Entity):
     @model_validator(mode="after")
     def ensure_all_repo_aliases_are_found(self):
         missing_aliases = set()
-        aliases = set(self.repo_aliases.
+        aliases = set(self.repo_aliases.values())
         for group in self.test_suites:
-            if more_missing := group.
+            if more_missing := (group.repo_go_packages.keys() - aliases):
                 logger.warning(f"repo aliases not found for group={group.name}: {more_missing}")
                 missing_aliases |= more_missing
         if missing_aliases:
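To make the new package_url_tests method concrete, a hedged sketch (the suite definition, paths, and test name are invented; the method scans *.go files in each configured package for functions matching the prefix):

    from pathlib import Path

    from atlas_init.settings.config import TestSuite

    suite = TestSuite(
        name="search_deployment",
        repo_go_packages={"tf": ["internal/service/searchdeployment"]},  # hypothetical
    )
    repo_path = Path.home() / "code" / "terraform-provider-mongodbatlas"  # hypothetical clone
    tests = suite.package_url_tests(repo_path, prefix="TestAcc")
    # -> {"github.com/mongodb/terraform-provider-mongodbatlas/internal/service/searchdeployment":
    #        {"TestAccSearchDeployment_basic": Path(".../internal/service/searchdeployment")}}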
atlas_init/settings/path.py
CHANGED
@@ -12,7 +12,7 @@ from atlas_init import running_in_repo
 logger = logging.getLogger(__name__)
 """WARNING these variables should only be used through the AtlasInitSettings, not directly"""
 if running_in_repo():
-    ROOT_PATH = Path(__file__).parent.parent.parent
+    ROOT_PATH = Path(__file__).parent.parent.parent  # atlas_init REPO_PATH
     DEFAULT_PROFILES_PATH = ROOT_PATH / "profiles"
 else:
     ROOT_PATH = Path(__file__).parent.parent  # site package install directory
atlas_init/settings/rich_utils.py
CHANGED
@@ -36,7 +36,7 @@ def hide_secrets(handler: logging.Handler, secrets_dict: dict[str, str]) -> None:
         if not isinstance(value, str):
             continue
         key_lower = key.lower()
-        if key_lower in {"true", "false"} or value.lower() in {"true", "false"}:
+        if key_lower in {"true", "false"} or value.lower() in {"true", "false"} or value.isdigit():
             continue
         if any(safe in key_lower for safe in safe_keys):
             continue
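The effect of the added value.isdigit() guard, restated as a standalone sketch (a condensed restatement of the condition, not the full hide_secrets function):

    def skipped_from_masking(key: str, value: str) -> bool:
        # condensed version of the updated condition above
        return key.lower() in {"true", "false"} or value.lower() in {"true", "false"} or value.isdigit()

    assert skipped_from_masking("MONGODB_ATLAS_PORT", "8080")   # purely numeric values are no longer masked
    assert skipped_from_masking("USE_VPC", "true")              # boolean-ish values were already skipped
    assert not skipped_from_masking("MONGODB_ATLAS_PRIVATE_KEY", "s3cr3t")  # real secrets still masked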
atlas_init/tf/.terraform.lock.hcl
CHANGED
@@ -2,25 +2,25 @@
 # Manual edits may be lost in future updates.
 
 provider "registry.terraform.io/hashicorp/aws" {
-  version     = "5.
+  version     = "5.87.0"
   constraints = "~> 5.0"
   hashes = [
-    "h1:
-    "zh:
-    "zh:
-    "zh:
-    "zh:
-    "zh:
-    "zh:
-    "zh:
-    "zh:
+    "h1:IYq3by7O/eJuXzJwOF920z2nZEkw08PkDFdw2xkyhrs=",
+    "zh:017f237466875c919330b9e214fb33af14fffbff830d3755e8976d8fa3c963c2",
+    "zh:0776d1e60aa93c85ecbb01144aed2789c8e180bb0f1c811a0aba17ca7247b26c",
+    "zh:0dfa5c6cfb3724494fdc73f7d042515e88a20da8968959f48b3ec0b937bd8c8f",
+    "zh:1707a5ead36a7980cb3f83e8b69a67a14ae725bfc990ddfcc209b59400b57b04",
+    "zh:1c71f54fdd6adcbe547d6577dbb843d72a30fef0ab882d0afbeb8a7b348bc442",
+    "zh:3563c850a29790957ec3f4d3ba203bfa2e084ac7319035b3f43b91f818a2c9b4",
+    "zh:520bf6cef53785a92226651d5bebacbbf9314bdbc3211d0bf0903bce4e45149d",
+    "zh:56f9778575830f6e5c23462c2eccbf2c9afaddb00a69275fcfb33cd1a6d17f4d",
+    "zh:73e381cb0b1e76d471d7b0952f3d2a80350b507d15bda9b7041ea69077e3b5b5",
+    "zh:7da74b48f8fa088be758a92407980400cb4b039a8d9ba3c108907e4055e9ad6f",
+    "zh:8dacfa9623ba2e0197fe7db6faaaa0820a3b91fe00ba9e5d8a646340522bc8dd",
     "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
-    "zh:
-    "zh:
-    "zh:
-    "zh:d6c76a24205513725269e4783da14be9648e9086fb621496052f4b37d52d785e",
-    "zh:d95a31745affb178ea48fa8e0be94691a8f7507ea55c0d0a4b6e0a8ef6fcb929",
-    "zh:f061ce59fac1bc425c1092e6647ed4bb1b61824416041b46dbf336e01a63ad89",
+    "zh:9c2ebd21d697e1a611fe201788dc9e1678949a088afc85d4589563bca484d835",
+    "zh:ac5d0bbf36f9a6cedbfb63993f6baf0aabdaf21c8d7fc3b1e69ba8cbf344b5f3",
+    "zh:c2329644179f78a0458b6cf2dd5eaadca4c610fc3577a1b50620544d92df13e8",
   ]
 }
 
atlas_init/tf/main.tf
CHANGED
@@ -5,7 +5,8 @@ locals {
     Owner = "terraform-atlas-init"
   }
   use_aws_vpc = var.use_private_link || var.use_vpc_peering || var.use_aws_vpc
-
+  use_aws_kms = var.use_encryption_at_rest
+  use_cloud_provider = var.use_aws_s3 || var.use_encryption_at_rest
   # https://www.mongodb.com/docs/atlas/reference/amazon-aws/
   atlas_region = replace(upper(var.aws_region), "-", "_")
   use_cluster = var.cluster_config.name != ""
@@ -133,6 +134,18 @@ module "aws_s3" {
   iam_role_name = module.cloud_provider[0].iam_role_name
 }
 
+module "aws_kms" {
+  source = "./modules/aws_kms"
+  count = local.use_aws_kms ? 1 : 0
+
+  access_iam_role_arns = {
+    atlas = module.cloud_provider[0].iam_role_arn
+  }
+  aws_account_id = local.aws_account_id
+  aws_region = var.aws_region
+  key_suffix = var.project_name
+}
+
 module "federated_vars" {
   source = "./modules/federated_vars"
   count = var.use_federated_vars ? 1 : 0
@@ -142,3 +155,14 @@ module "federated_vars" {
   project_id = local.project_id
   base_url = var.atlas_base_url
 }
+
+module "encryption_at_rest" {
+  source = "./modules/encryption_at_rest"
+  count = var.use_encryption_at_rest ? 1 : 0
+
+  project_id = local.project_id
+  atlas_role_id = module.cloud_provider[0].atlas_role_id
+  kms_key_id = module.aws_kms[0].kms_key_id
+  atlas_regions = [local.atlas_region]
+
+}
|