dbt-platform-helper 13.0.1__py3-none-any.whl → 13.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt_platform_helper/COMMANDS.md +2 -2
- dbt_platform_helper/commands/config.py +26 -33
- dbt_platform_helper/commands/copilot.py +1 -1
- dbt_platform_helper/commands/environment.py +1 -1
- dbt_platform_helper/commands/generate.py +2 -2
- dbt_platform_helper/commands/pipeline.py +1 -1
- dbt_platform_helper/commands/version.py +30 -30
- dbt_platform_helper/domain/copilot_environment.py +11 -10
- dbt_platform_helper/domain/database_copy.py +1 -1
- dbt_platform_helper/domain/maintenance_page.py +32 -7
- dbt_platform_helper/domain/pipelines.py +17 -1
- dbt_platform_helper/domain/terraform_environment.py +17 -61
- dbt_platform_helper/providers/config.py +12 -2
- dbt_platform_helper/{domain → providers}/config_validator.py +10 -5
- dbt_platform_helper/providers/files.py +13 -12
- dbt_platform_helper/providers/platform_config_schema.py +18 -13
- dbt_platform_helper/providers/semantic_version.py +126 -0
- dbt_platform_helper/providers/terraform_manifest.py +126 -29
- dbt_platform_helper/providers/validation.py +0 -14
- dbt_platform_helper/providers/version.py +36 -0
- dbt_platform_helper/providers/yaml_file.py +5 -3
- dbt_platform_helper/templates/environment-pipelines/main.tf +1 -1
- dbt_platform_helper/utils/application.py +3 -2
- dbt_platform_helper/utils/validation.py +1 -1
- dbt_platform_helper/utils/versioning.py +152 -225
- {dbt_platform_helper-13.0.1.dist-info → dbt_platform_helper-13.1.0.dist-info}/METADATA +1 -1
- {dbt_platform_helper-13.0.1.dist-info → dbt_platform_helper-13.1.0.dist-info}/RECORD +31 -32
- {dbt_platform_helper-13.0.1.dist-info → dbt_platform_helper-13.1.0.dist-info}/WHEEL +1 -1
- platform_helper.py +2 -2
- dbt_platform_helper/domain/test_platform_terraform_manifest_generator.py +0 -100
- dbt_platform_helper/templates/environments/main.tf +0 -46
- dbt_platform_helper/utils/platform_config.py +0 -20
- {dbt_platform_helper-13.0.1.dist-info → dbt_platform_helper-13.1.0.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-13.0.1.dist-info → dbt_platform_helper-13.1.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/providers/config.py

@@ -4,7 +4,8 @@ from pathlib import Path
 from schema import SchemaError
 
 from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
-from dbt_platform_helper.domain.config_validator import ConfigValidator
+from dbt_platform_helper.providers.config_validator import ConfigValidator
+from dbt_platform_helper.providers.config_validator import ConfigValidatorError
 from dbt_platform_helper.providers.io import ClickIOProvider
 from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema
 from dbt_platform_helper.providers.yaml_file import FileNotFoundException

@@ -35,7 +36,10 @@ class ConfigProvider:
         # also, we apply defaults but discard that data. Should we just apply
         # defaults to config returned by load_and_validate
         enriched_config = ConfigProvider.apply_environment_defaults(self.config)
-
+        try:
+            self.validator.run_validations(enriched_config)
+        except ConfigValidatorError as exc:
+            self.io.abort_with_error(f"Config validation has failed.\n{str(exc)}")
 
     def load_and_validate_platform_config(self, path=PLATFORM_CONFIG_FILE):
         try:

@@ -54,6 +58,12 @@ class ConfigProvider:
 
         return self.config
 
+    def load_unvalidated_config_file(self, path=PLATFORM_CONFIG_FILE):
+        try:
+            return self.file_provider.load(path)
+        except FileProviderException:
+            return {}
+
     # TODO this general function should be moved out of ConfigProvider
     def config_file_check(self, path=PLATFORM_CONFIG_FILE):
         if not Path(path).exists():
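The config.py changes route validation failures through ConfigValidatorError and add a forgiving loader. A minimal sketch of the resulting behaviour; the zero-argument ConfigProvider() construction matches its use in utils/application.py below, the path value is illustrative:

from dbt_platform_helper.providers.config import ConfigProvider

config = ConfigProvider()
# Returns the parsed YAML as a dict, or {} when the file cannot be loaded,
# so callers no longer need to catch FileProviderException themselves.
app_config = config.load_unvalidated_config_file("platform-config.yml")
print(app_config.get("application", "<unset>"))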
dbt_platform_helper/providers/config_validator.py (moved from dbt_platform_helper/domain/config_validator.py)

@@ -2,11 +2,16 @@ from typing import Callable
 
 import boto3
 
+from dbt_platform_helper.platform_exception import PlatformException
 from dbt_platform_helper.providers.io import ClickIOProvider
 from dbt_platform_helper.providers.opensearch import OpensearchProvider
 from dbt_platform_helper.providers.redis import RedisProvider
 
 
+class ConfigValidatorError(PlatformException):
+    pass
+
+
 class ConfigValidator:
 
     def __init__(

@@ -110,7 +115,7 @@ class ConfigValidator:
             envs = detail["bad_envs"]
             acc = detail["account"]
             message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n"
-
+        raise ConfigValidatorError(message)
 
     def validate_environment_pipelines_triggers(self, config):
         errors = []

@@ -134,7 +139,7 @@ class ConfigValidator:
 
         if errors:
             error_message = "The following pipelines are misconfigured: \n"
-
+            raise ConfigValidatorError(error_message + "\n ".join(errors))
 
     def validate_database_copy_section(self, config):
         extensions = config.get("extensions", {})

@@ -220,7 +225,7 @@ class ConfigValidator:
             )
 
         if errors:
-
+            raise ConfigValidatorError("\n".join(errors))
 
     def validate_database_migration_input_sources(self, config: dict):
         extensions = config.get("extensions", {})

@@ -247,7 +252,7 @@ class ConfigValidator:
             )
             if "import" not in data_migration and "import_sources" not in data_migration:
                 errors.append(
-                    f"
+                    f"'import_sources' property in '{extension_name}.environments.{env}.data_migration' is missing."
                 )
         if errors:
-
+            raise ConfigValidatorError("\n".join(errors))
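Validation methods now raise instead of aborting directly, which lets ConfigProvider decide how to surface the failure. A hedged usage sketch; the no-argument ConfigValidator() construction is an assumption, as its full __init__ signature is not shown above:

from dbt_platform_helper.providers.config_validator import ConfigValidator
from dbt_platform_helper.providers.config_validator import ConfigValidatorError

validator = ConfigValidator()  # assumption: all __init__ parameters have defaults
enriched_config = {"application": "demo"}  # illustrative config dict
try:
    validator.run_validations(enriched_config)
except ConfigValidatorError as exc:
    print(f"Config validation has failed.\n{exc}")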
dbt_platform_helper/providers/files.py

@@ -1,4 +1,3 @@
-from os import makedirs
 from pathlib import Path
 
 

@@ -8,19 +7,21 @@ class FileProvider:
     pass
 
     @staticmethod
-    def mkfile(base_path: str,
-        file_path = Path(
-
-        file_exists = file.exists()
-
-        if not file_path.parent.exists():
-            makedirs(file_path.parent)
-
+    def mkfile(base_path: str, file_name: str, contents: str, overwrite=False) -> str:
+        file_path = Path(base_path).joinpath(file_name)
+        file_exists = file_path.exists()
         if file_exists and not overwrite:
             return f"File {file_path} exists; doing nothing"
 
-
+        file_path.parent.mkdir(parents=True, exist_ok=True)
+        file_path.write_text(contents)
 
-
+        action = "overwritten" if file_exists and overwrite else "created"
+        return f"File {file_name} {action}"
 
-
+    @staticmethod
+    def delete_file(base_path: str, file_name: str):
+        file_path = Path(base_path) / file_name
+        if file_path.exists():
+            file_path.unlink()
+            return f"{str(file_path)} has been deleted"
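The reworked FileProvider drops os.makedirs in favour of Path.mkdir and reports what it did. Usage per the code above (paths are illustrative):

from dbt_platform_helper.providers.files import FileProvider

# Parent directories are created automatically; overwrite=True replaces the file.
print(FileProvider.mkfile("terraform/environments/dev", "main.tf.json", "{}", True))
# -> "File main.tf.json created" (or "File main.tf.json overwritten" on later runs)

# delete_file returns a message only when the file actually existed, else None.
message = FileProvider.delete_file("terraform/environments/dev", "main.tf")
if message:
    print(message)  # -> "terraform/environments/dev/main.tf has been deleted"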
dbt_platform_helper/providers/platform_config_schema.py

@@ -14,8 +14,8 @@ class PlatformConfigSchema:
     def schema() -> Schema:
         return Schema(
             {
-                # The following line is for the AWS Copilot version, will be removed under DBTP-1002
                 "application": str,
+                Optional("deploy_repository"): str,
                 Optional("default_versions"): PlatformConfigSchema.__default_versions_schema(),
                 Optional("environments"): PlatformConfigSchema.__environments_schema(),
                 Optional("codebase_pipelines"): PlatformConfigSchema.__codebase_pipelines_schema(),

@@ -134,7 +134,7 @@ class PlatformConfigSchema:
                 Optional("additional_ecr_repository"): str,
                 Optional("deploy_repository_branch"): str,
                 "services": [{str: [str]}],
-                "pipelines": [
+                Optional("pipelines"): [
                     Or(
                         {
                             "name": str,

@@ -435,7 +435,7 @@ class PlatformConfigSchema:
         return True
 
     @staticmethod
-    def
+    def __s3_bucket_schema() -> dict:
         def _valid_s3_bucket_arn(key):
             return Regex(
                 r"^arn:aws:s3::.*",

@@ -485,6 +485,10 @@ class PlatformConfigSchema:
 
         return dict(
             {
+                "type": "s3",
+                Optional("objects"): [
+                    {"key": str, Optional("body"): str, Optional("content_type"): str}
+                ],
                 Optional("readonly"): bool,
                 Optional("serve_static_content"): bool,
                 Optional("serve_static_param_name"): str,

@@ -518,18 +522,19 @@ class PlatformConfigSchema:
             }
         )
 
-    @staticmethod
-    def __s3_bucket_schema() -> dict:
-        return PlatformConfigSchema.__valid_s3_base_definition() | {
-            "type": "s3",
-            Optional("objects"): [
-                {"key": str, Optional("body"): str, Optional("content_type"): str}
-            ],
-        }
-
     @staticmethod
     def __s3_bucket_policy_schema() -> dict:
-        return
+        return dict(
+            {
+                "type": "s3-policy",
+                Optional("services"): Or("__all__", [str]),
+                Optional("environments"): {
+                    PlatformConfigSchema.__valid_environment_name(): {
+                        "bucket_name": PlatformConfigSchema.valid_s3_bucket_name,
+                    },
+                },
+            }
+        )
 
     @staticmethod
     def string_matching_regex(regex_pattern: str) -> Callable:
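The user-visible schema change is that a codebase's pipelines list is now optional. A self-contained illustration of what wrapping the key in Optional(...) means in the schema library, using a simplified stand-in schema rather than the project's real one:

from schema import Optional, Schema, SchemaError

codebase = Schema({"services": [{str: [str]}], Optional("pipelines"): [dict]})
codebase.validate({"services": [{"web": ["web"]}]})  # OK: "pipelines" may now be omitted

try:
    Schema({"pipelines": [dict]}).validate({})  # pre-13.1.0 behaviour: key was required
except SchemaError as exc:
    print(exc)  # Missing key: 'pipelines'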
dbt_platform_helper/providers/semantic_version.py (new file)

@@ -0,0 +1,126 @@
+import re
+from dataclasses import dataclass
+from dataclasses import field
+from typing import Dict
+from typing import Optional
+from typing import Union
+
+from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
+from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
+from dbt_platform_helper.providers.validation import ValidationException
+
+
+class IncompatibleMajorVersionException(ValidationException):
+    def __init__(self, app_version: str, check_version: str):
+        super().__init__()
+        self.app_version = app_version
+        self.check_version = check_version
+
+
+class IncompatibleMinorVersionException(ValidationException):
+    def __init__(self, app_version: str, check_version: str):
+        super().__init__()
+        self.app_version = app_version
+        self.check_version = check_version
+
+
+class SemanticVersion:
+    def __init__(self, major, minor, patch):
+        self.major = major
+        self.minor = minor
+        self.patch = patch
+
+    def __str__(self) -> str:
+        if self.major is None:
+            return "unknown"
+        return ".".join([str(s) for s in [self.major, self.minor, self.patch]])
+
+    def __lt__(self, other) -> bool:
+        return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
+
+    def __eq__(self, other) -> bool:
+        return (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch)
+
+    def validate_compatibility_with(self, other):
+        if (self.major == 0 and other.major == 0) and (
+            self.minor != other.minor or self.patch != other.patch
+        ):
+            raise IncompatibleMajorVersionException(str(self), str(other))
+
+        if self.major != other.major:
+            raise IncompatibleMajorVersionException(str(self), str(other))
+
+        if self.minor != other.minor:
+            raise IncompatibleMinorVersionException(str(self), str(other))
+
+    @staticmethod
+    def from_string(version_string: Union[str, None]):
+        if version_string is None:
+            return None
+
+        version_plain = version_string.replace("v", "")
+        version_segments = re.split(r"[.\-]", version_plain)
+
+        if len(version_segments) != 3:
+            return None
+
+        output_version = [0, 0, 0]
+        for index, segment in enumerate(version_segments):
+            try:
+                output_version[index] = int(segment)
+            except ValueError:
+                output_version[index] = -1
+
+        return SemanticVersion(output_version[0], output_version[1], output_version[2])
+
+
+class VersionStatus:
+    def __init__(
+        self, local_version: SemanticVersion = None, latest_release: SemanticVersion = None
+    ):
+        self.local = local_version
+        self.latest = latest_release
+
+    def is_outdated(self):
+        return self.local != self.latest
+
+    def warn(self):
+        pass
+
+
+@dataclass
+class PlatformHelperVersionStatus(VersionStatus):
+    local: Optional[SemanticVersion] = None
+    latest: Optional[SemanticVersion] = None
+    deprecated_version_file: Optional[SemanticVersion] = None
+    platform_config_default: Optional[SemanticVersion] = None
+    pipeline_overrides: Optional[Dict[str, str]] = field(default_factory=dict)
+
+    def warn(self) -> dict:
+        if self.platform_config_default and not self.deprecated_version_file:
+            return {}
+
+        warnings = []
+        errors = []
+
+        missing_default_version_message = f"Create a section in the root of '{PLATFORM_CONFIG_FILE}':\n\ndefault_versions:\n platform-helper: "
+        deprecation_message = (
+            f"Please delete '{PLATFORM_HELPER_VERSION_FILE}' as it is now deprecated."
+        )
+
+        if self.platform_config_default and self.deprecated_version_file:
+            warnings.append(deprecation_message)
+
+        if not self.platform_config_default and self.deprecated_version_file:
+            warnings.append(deprecation_message)
+            warnings.append(f"{missing_default_version_message}{self.deprecated_version_file}\n")
+
+        if not self.platform_config_default and not self.deprecated_version_file:
+            message = f"Cannot get dbt-platform-helper version from '{PLATFORM_CONFIG_FILE}'.\n"
+            message += f"{missing_default_version_message}{self.local}\n"
+            errors.append(message)
+
+        return {
+            "warnings": warnings,
+            "errors": errors,
+        }
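Behaviour of the new SemanticVersion, as defined above:

from dbt_platform_helper.providers.semantic_version import (
    IncompatibleMinorVersionException,
    SemanticVersion,
)

local = SemanticVersion.from_string("v13.1.0")  # "v" characters are stripped
latest = SemanticVersion.from_string("13.2.0")
print(local)                                # 13.1.0
print(local < latest)                       # True: tuple comparison of (major, minor, patch)
print(SemanticVersion.from_string("13.1"))  # None: exactly three segments are required

try:
    local.validate_compatibility_with(latest)
except IncompatibleMinorVersionException:
    print("same major version, different minor version")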
dbt_platform_helper/providers/terraform_manifest.py

@@ -2,50 +2,74 @@ import json
 from datetime import datetime
 from importlib.metadata import version
 from pathlib import Path
-from typing import Callable
-
-import click
 
 from dbt_platform_helper.constants import SUPPORTED_AWS_PROVIDER_VERSION
 from dbt_platform_helper.constants import SUPPORTED_TERRAFORM_VERSION
+from dbt_platform_helper.providers.config import ConfigProvider
 from dbt_platform_helper.providers.files import FileProvider
+from dbt_platform_helper.providers.io import ClickIOProvider
 
 
 class TerraformManifestProvider:
     def __init__(
-        self, file_provider: FileProvider = FileProvider(),
+        self, file_provider: FileProvider = FileProvider(), io: ClickIOProvider = ClickIOProvider()
     ):
         self.file_provider = file_provider
-        self.
+        self.io = io
 
     def generate_codebase_pipeline_config(
         self,
         platform_config: dict,
         terraform_platform_modules_version: str,
         ecr_imports: dict[str, str],
+        deploy_repository: str,
     ):
-        default_account = (
-            platform_config.get("environments", {})
-            .get("*", {})
-            .get("accounts", {})
-            .get("deploy", {})
-            .get("name")
-        )
+        default_account = self._get_account_for_env("*", platform_config)
+        state_key_suffix = f"{platform_config['application']}-codebase-pipelines"
+
         terraform = {}
         self._add_header(terraform)
-        self.
+        self._add_codebase_pipeline_locals(terraform)
         self._add_provider(terraform, default_account)
-        self._add_backend(terraform, platform_config, default_account)
-        self._add_codebase_pipeline_module(
+        self._add_backend(terraform, platform_config, default_account, state_key_suffix)
+        self._add_codebase_pipeline_module(
+            terraform, terraform_platform_modules_version, deploy_repository
+        )
         self._add_imports(terraform, ecr_imports)
+        self._write_terraform_json(terraform, "terraform/codebase-pipelines")
 
-
-
-
-
-
+    def generate_environment_config(
+        self,
+        platform_config: dict,
+        env: str,
+        terraform_platform_modules_version: str,
+    ):
+        platform_config = ConfigProvider.apply_environment_defaults(platform_config)
+        account = self._get_account_for_env(env, platform_config)
+
+        application_name = platform_config["application"]
+        state_key_suffix = f"{platform_config['application']}-{env}"
+        env_dir = f"terraform/environments/{env}"
+
+        terraform = {}
+        self._add_header(terraform)
+        self._add_environment_locals(terraform, application_name)
+        self._add_backend(terraform, platform_config, account, state_key_suffix)
+        self._add_extensions_module(terraform, terraform_platform_modules_version, env)
+        self._add_moved(terraform, platform_config)
+        self._ensure_no_hcl_manifest_file(env_dir)
+        self._write_terraform_json(terraform, env_dir)
+
+    @staticmethod
+    def _get_account_for_env(env, platform_config):
+        account = (
+            platform_config.get("environments", {})
+            .get(env, {})
+            .get("accounts", {})
+            .get("deploy", {})
+            .get("name")
         )
-
+        return account
 
     @staticmethod
     def _add_header(terraform: dict):

@@ -56,7 +80,7 @@ class TerraformManifestProvider:
         terraform["//"] = f"{version_header} {warning}"
 
     @staticmethod
-    def
+    def _add_codebase_pipeline_locals(terraform: dict):
         terraform["locals"] = {
             "platform_config": '${yamldecode(file("../../platform-config.yml"))}',
             "application": '${local.platform_config["application"]}',

@@ -73,17 +97,17 @@ class TerraformManifestProvider:
         terraform["provider"]["aws"]["shared_credentials_files"] = ["~/.aws/config"]
 
     @staticmethod
-    def _add_backend(terraform: dict, platform_config: dict,
+    def _add_backend(terraform: dict, platform_config: dict, account: str, state_key_suffix: str):
         terraform["terraform"] = {
             "required_version": SUPPORTED_TERRAFORM_VERSION,
             "backend": {
                 "s3": {
-                    "bucket": f"terraform-platform-state-{
-                    "key": f"tfstate/application/{
+                    "bucket": f"terraform-platform-state-{account}",
+                    "key": f"tfstate/application/{state_key_suffix}.tfstate",
                     "region": "eu-west-2",
                     "encrypt": True,
-                    "kms_key_id": f"alias/terraform-platform-state-s3-key-{
-                    "dynamodb_table": f"terraform-platform-lockdb-{
+                    "kms_key_id": f"alias/terraform-platform-state-s3-key-{account}",
+                    "dynamodb_table": f"terraform-platform-lockdb-{account}",
                 }
             },
             "required_providers": {

@@ -92,7 +116,9 @@ class TerraformManifestProvider:
         }
 
     @staticmethod
-    def _add_codebase_pipeline_module(
+    def _add_codebase_pipeline_module(
+        terraform: dict, terraform_platform_modules_version: str, deploy_repository: str
+    ):
         source = f"git::https://github.com/uktrade/terraform-platform-modules.git//codebase-pipelines?depth=1&ref={terraform_platform_modules_version}"
         terraform["module"] = {
             "codebase-pipelines": {

@@ -101,8 +127,9 @@ class TerraformManifestProvider:
                 "application": "${local.application}",
                 "codebase": "${each.key}",
                 "repository": "${each.value.repository}",
+                "deploy_repository": f"{deploy_repository}",
                 "additional_ecr_repository": '${lookup(each.value, "additional_ecr_repository", null)}',
-                "pipelines":
+                "pipelines": '${lookup(each.value, "pipelines", [])}',
                 "services": "${each.value.services}",
                 "requires_image_build": '${lookup(each.value, "requires_image_build", true)}',
                 "slack_channel": '${lookup(each.value, "slack_channel", "/codebuild/slack_oauth_channel")}',

@@ -110,6 +137,13 @@ class TerraformManifestProvider:
             }
         }
 
+    @staticmethod
+    def _add_extensions_module(terraform: dict, terraform_platform_modules_version: str, env: str):
+        source = f"git::https://github.com/uktrade/terraform-platform-modules.git//extensions?depth=1&ref={terraform_platform_modules_version}"
+        terraform["module"] = {
+            "extensions": {"source": source, "args": "${local.args}", "environment": env}
+        }
+
     @staticmethod
     def _add_imports(terraform: dict, ecr_imports: dict[str, str]):
         if ecr_imports:

@@ -118,3 +152,66 @@ class TerraformManifestProvider:
                     "id": "${each.value}",
                     "to": "module.codebase-pipelines[each.key].aws_ecr_repository.this",
                 }
+
+    @staticmethod
+    def _add_environment_locals(terraform: dict, app: str):
+        terraform["locals"] = {
+            "config": '${yamldecode(file("../../../platform-config.yml"))}',
+            "environments": '${local.config["environments"]}',
+            "env_config": '${{for name, config in local.environments: name => merge(lookup(local.environments, "*", {}), config)}}',
+            "args": {
+                "application": app,
+                "services": '${local.config["extensions"]}',
+                "env_config": "${local.env_config}",
+            },
+        }
+
+    @staticmethod
+    def _add_moved(terraform, platform_config):
+        extensions_comment = "Moved extensions-tf to just extensions - this block tells terraform this. Can be removed once all services have moved to the new naming."
+        terraform["moved"] = [
+            {
+                "//": extensions_comment,
+                "from": "module.extensions-tf",
+                "to": "module.extensions",
+            }
+        ]
+
+        extensions = platform_config.get("extensions", {})
+        s3_extension_names = [
+            extension_name
+            for extension_name, extension in extensions.items()
+            if extension["type"] == "s3"
+        ]
+        s3_comment = "S3 bucket resources are now indexed. Can be removed once all services have moved to terraform-platform-modules 5.x."
+
+        for name in s3_extension_names:
+            resources = [
+                "aws_s3_bucket_server_side_encryption_configuration.encryption-config",
+                "aws_s3_bucket_policy.bucket-policy",
+                "aws_kms_key.kms-key",
+                "aws_kms_alias.s3-bucket",
+            ]
+            moves = [f'module.extensions.module.s3["{name}"].{resource}' for resource in resources]
+            for move in moves:
+                terraform["moved"].append(
+                    {
+                        "//": s3_comment,
+                        "from": move,
+                        "to": f"{move}[0]",
+                    }
+                )
+
+    def _write_terraform_json(self, terraform: dict, env_dir: str):
+        message = self.file_provider.mkfile(
+            str(Path(env_dir).absolute()),
+            "main.tf.json",
+            json.dumps(terraform, indent=2),
+            True,
+        )
+        self.io.info(message)
+
+    def _ensure_no_hcl_manifest_file(self, env_dir):
+        message = self.file_provider.delete_file(env_dir, "main.tf")
+        if message:
+            self.io.info(f"Manifest has moved to main.tf.json. {message}")
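Putting the new manifest methods together: generate_environment_config now emits terraform/environments/<env>/main.tf.json and removes any legacy HCL file. A hedged sketch, where "demo", "dev", "prod-acct" and the config shape are illustrative values and we assume ConfigProvider.apply_environment_defaults accepts this minimal config:

from dbt_platform_helper.providers.terraform_manifest import TerraformManifestProvider

platform_config = {
    "application": "demo",
    "environments": {"dev": {"accounts": {"deploy": {"name": "prod-acct"}}}},
}
provider = TerraformManifestProvider()
provider.generate_environment_config(platform_config, "dev", "5.0.0")
# Writes terraform/environments/dev/main.tf.json with an S3 backend keyed on
# tfstate/application/demo-dev.tfstate, and logs
# "Manifest has moved to main.tf.json. ..." if a main.tf was deleted.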
dbt_platform_helper/providers/validation.py

@@ -3,17 +3,3 @@ from dbt_platform_helper.platform_exception import PlatformException
 
 class ValidationException(PlatformException):
     pass
-
-
-class IncompatibleMajorVersionException(ValidationException):
-    def __init__(self, app_version: str, check_version: str):
-        super().__init__()
-        self.app_version = app_version
-        self.check_version = check_version
-
-
-class IncompatibleMinorVersionException(ValidationException):
-    def __init__(self, app_version: str, check_version: str):
-        super().__init__()
-        self.app_version = app_version
-        self.check_version = check_version
dbt_platform_helper/providers/version.py (new file)

@@ -0,0 +1,36 @@
+from abc import ABC
+
+import requests
+
+from dbt_platform_helper.providers.semantic_version import SemanticVersion
+
+
+class VersionProvider(ABC):
+    pass
+
+
+# TODO add timeouts and exception handling for requests
+# TODO Alternatively use the gitpython package?
+class GithubVersionProvider(VersionProvider):
+    @staticmethod
+    def get_latest_version(repo_name: str, tags: bool = False) -> SemanticVersion:
+        if tags:
+            tags_list = requests.get(f"https://api.github.com/repos/{repo_name}/tags").json()
+            versions = [SemanticVersion.from_string(v["name"]) for v in tags_list]
+            versions.sort(reverse=True)
+            return versions[0]
+
+        package_info = requests.get(
+            f"https://api.github.com/repos/{repo_name}/releases/latest"
+        ).json()
+        return SemanticVersion.from_string(package_info["tag_name"])
+
+
+class PyPiVersionProvider(VersionProvider):
+    @staticmethod
+    def get_latest_version(project_name: str) -> SemanticVersion:
+        package_info = requests.get(f"https://pypi.org/pypi/{project_name}/json").json()
+        released_versions = package_info["releases"].keys()
+        parsed_released_versions = [SemanticVersion.from_string(v) for v in released_versions]
+        parsed_released_versions.sort(reverse=True)
+        return parsed_released_versions[0]
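Usage sketch for the new version providers; both make live HTTP calls, so the printed values depend on the registries at run time:

from dbt_platform_helper.providers.version import GithubVersionProvider
from dbt_platform_helper.providers.version import PyPiVersionProvider

# Latest PyPI release of this package, parsed into a SemanticVersion.
print(PyPiVersionProvider.get_latest_version("dbt-platform-helper"))

# Latest GitHub tag (tags=True) rather than the latest release.
print(GithubVersionProvider.get_latest_version("uktrade/terraform-platform-modules", tags=True))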
dbt_platform_helper/providers/yaml_file.py

@@ -19,11 +19,13 @@ class FileNotFoundException(FileProviderException):
 
 
 class InvalidYamlException(YamlFileProviderException):
-
+    def __init__(self, path: str):
+        super().__init__(f"""{path} is not valid YAML.""")
 
 
 class DuplicateKeysException(YamlFileProviderException):
-
+    def __init__(self, duplicate_keys: str):
+        super().__init__(f"""Duplicate keys found in your config file: {duplicate_keys}.""")
 
 
 class YamlFileProvider:

@@ -39,7 +41,7 @@ class YamlFileProvider:
         try:
             yaml_content = yaml.safe_load(Path(path).read_text())
         except ParserError:
-            raise InvalidYamlException(
+            raise InvalidYamlException(path)
 
         if not yaml_content:
            return {}
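The YAML exceptions now build their own messages, so raise sites only pass the offending value. A small sketch, assuming YamlFileProviderException ultimately forwards the message to Exception (which the shortened raise InvalidYamlException(path) call implies):

from dbt_platform_helper.providers.yaml_file import InvalidYamlException

try:
    raise InvalidYamlException("platform-config.yml")
except InvalidYamlException as exc:
    print(exc)  # expected: "platform-config.yml is not valid YAML."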
dbt_platform_helper/templates/environment-pipelines/main.tf

@@ -40,7 +40,7 @@ module "environment-pipelines" {
 
   application = "{{ application }}"
   pipeline_name = each.key
-  repository = "
+  repository = "{{ deploy_repository }}"
 
   environments = each.value.environments
   all_pipelines = local.all_pipelines
dbt_platform_helper/utils/application.py

@@ -10,11 +10,11 @@ import boto3
 
 from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
 from dbt_platform_helper.platform_exception import PlatformException
+from dbt_platform_helper.providers.config import ConfigProvider
 from dbt_platform_helper.utils.aws import get_aws_session_or_abort
 from dbt_platform_helper.utils.aws import get_profile_name_from_account_id
 from dbt_platform_helper.utils.aws import get_ssm_secrets
 from dbt_platform_helper.utils.messages import abort_with_error
-from dbt_platform_helper.utils.platform_config import load_unvalidated_config_file
 
 
 @dataclass

@@ -125,8 +125,9 @@ def load_application(app=None, default_session=None) -> Application:
 
 def get_application_name(abort=abort_with_error):
     if Path(PLATFORM_CONFIG_FILE).exists():
+        config = ConfigProvider()
         try:
-            app_config = load_unvalidated_config_file()
+            app_config = config.load_unvalidated_config_file()
             return app_config["application"]
         except KeyError:
             abort(
dbt_platform_helper/utils/validation.py

@@ -1,6 +1,6 @@
 from schema import SchemaError
 
-from dbt_platform_helper.domain.config_validator import ConfigValidator
+from dbt_platform_helper.providers.config_validator import ConfigValidator
 from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema
 
 