dbt-platform-helper 13.4.0__py3-none-any.whl → 14.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of dbt-platform-helper might be problematic.
- dbt_platform_helper/COMMANDS.md +26 -57
- dbt_platform_helper/commands/config.py +9 -0
- dbt_platform_helper/commands/environment.py +3 -7
- dbt_platform_helper/commands/notify.py +24 -77
- dbt_platform_helper/commands/pipeline.py +6 -12
- dbt_platform_helper/commands/secrets.py +1 -1
- dbt_platform_helper/constants.py +7 -5
- dbt_platform_helper/domain/codebase.py +0 -5
- dbt_platform_helper/domain/config.py +16 -9
- dbt_platform_helper/domain/copilot_environment.py +3 -3
- dbt_platform_helper/domain/database_copy.py +1 -1
- dbt_platform_helper/domain/maintenance_page.py +42 -38
- dbt_platform_helper/domain/notify.py +64 -0
- dbt_platform_helper/domain/pipelines.py +20 -16
- dbt_platform_helper/domain/terraform_environment.py +18 -11
- dbt_platform_helper/domain/versioning.py +18 -78
- dbt_platform_helper/providers/aws/exceptions.py +1 -1
- dbt_platform_helper/providers/cloudformation.py +1 -1
- dbt_platform_helper/providers/config.py +119 -17
- dbt_platform_helper/providers/config_validator.py +4 -31
- dbt_platform_helper/providers/copilot.py +3 -3
- dbt_platform_helper/providers/io.py +1 -1
- dbt_platform_helper/providers/load_balancers.py +6 -6
- dbt_platform_helper/providers/platform_config_schema.py +24 -29
- dbt_platform_helper/providers/schema_migrations/__init__.py +0 -0
- dbt_platform_helper/providers/schema_migrations/schema_v0_to_v1_migration.py +43 -0
- dbt_platform_helper/providers/schema_migrator.py +77 -0
- dbt_platform_helper/providers/secrets.py +5 -5
- dbt_platform_helper/providers/semantic_version.py +6 -1
- dbt_platform_helper/providers/slack_channel_notifier.py +62 -0
- dbt_platform_helper/providers/terraform_manifest.py +8 -10
- dbt_platform_helper/providers/version.py +1 -18
- dbt_platform_helper/providers/version_status.py +8 -61
- dbt_platform_helper/providers/yaml_file.py +23 -1
- dbt_platform_helper/templates/environment-pipelines/main.tf +1 -1
- dbt_platform_helper/utils/application.py +1 -1
- dbt_platform_helper/utils/aws.py +3 -3
- dbt_platform_helper/utils/git.py +0 -15
- {dbt_platform_helper-13.4.0.dist-info → dbt_platform_helper-14.0.0.dist-info}/METADATA +4 -4
- {dbt_platform_helper-13.4.0.dist-info → dbt_platform_helper-14.0.0.dist-info}/RECORD +44 -41
- platform_helper.py +0 -2
- dbt_platform_helper/commands/version.py +0 -37
- dbt_platform_helper/utils/tool_versioning.py +0 -12
- {dbt_platform_helper-13.4.0.dist-info → dbt_platform_helper-14.0.0.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-13.4.0.dist-info → dbt_platform_helper-14.0.0.dist-info}/WHEEL +0 -0
- {dbt_platform_helper-13.4.0.dist-info → dbt_platform_helper-14.0.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/providers/config.py

@@ -1,41 +1,59 @@
 from copy import deepcopy
+from datetime import datetime
 from pathlib import Path

 from schema import SchemaError

+from dbt_platform_helper.constants import FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION
 from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
+from dbt_platform_helper.constants import PLATFORM_CONFIG_SCHEMA_VERSION
+from dbt_platform_helper.constants import PLATFORM_HELPER_PACKAGE_NAME
 from dbt_platform_helper.providers.config_validator import ConfigValidator
 from dbt_platform_helper.providers.config_validator import ConfigValidatorError
 from dbt_platform_helper.providers.io import ClickIOProvider
 from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema
+from dbt_platform_helper.providers.semantic_version import SemanticVersion
+from dbt_platform_helper.providers.version import InstalledVersionProvider
 from dbt_platform_helper.providers.yaml_file import FileNotFoundException
 from dbt_platform_helper.providers.yaml_file import FileProviderException
 from dbt_platform_helper.providers.yaml_file import YamlFileProvider

+SCHEMA_VERSION_MESSAGE = """Installed version: platform-helper: {installed_platform_helper_version} (schema version: {installed_schema_version})
+'platform-config.yml' version: platform-helper: {config_platform_helper_version} (schema version: {config_schema_version})"""
+PLEASE_UPGRADE_TO_V13_MESSAGE = """Please ensure that you have already upgraded to platform-helper 13, following the instructions in https://platform.readme.trade.gov.uk/reference/upgrading-platform-helper/.
+
+Then upgrade platform-helper to version {installed_platform_helper_version} and run 'platform-helper config migrate' to upgrade the configuration to the current schema version."""
+

 class ConfigProvider:
     def __init__(
         self,
-        config_validator: ConfigValidator =
-        file_provider: YamlFileProvider =
-        io: ClickIOProvider =
+        config_validator: ConfigValidator = ConfigValidator(),
+        file_provider: YamlFileProvider = YamlFileProvider,
+        io: ClickIOProvider = ClickIOProvider(),
+        schema_version_for_installed_platform_helper: int = PLATFORM_CONFIG_SCHEMA_VERSION,
+        installed_version_provider: InstalledVersionProvider = InstalledVersionProvider,
     ):
         self.config = {}
-        self.validator = config_validator
-        self.io = io
-        self.file_provider = file_provider
-
-
+        self.validator = config_validator
+        self.io = io
+        self.file_provider = file_provider
+        self.schema_version_for_installed_platform_helper = (
+            schema_version_for_installed_platform_helper
+        )
+        self.installed_version_provider = installed_version_provider
+
+    # TODO: DBTP-1964: refactor so that apply_environment_defaults isn't set, discarded and set again
     def get_enriched_config(self):
         return self.apply_environment_defaults(self.load_and_validate_platform_config())

     def _validate_platform_config(self):
         PlatformConfigSchema.schema().validate(self.config)
-
-        # TODO= logically this isn't validation but loading + parsing, to move.
+        # TODO: DBTP-1964: = logically this isn't validation but loading + parsing, to move.
         # also, we apply defaults but discard that data. Should we just apply
         # defaults to config returned by load_and_validate
         enriched_config = ConfigProvider.apply_environment_defaults(self.config)
+
         try:
             self.validator.run_validations(enriched_config)
         except ConfigValidatorError as exc:

@@ -51,6 +69,8 @@ class ConfigProvider:
         except FileProviderException as e:
             self.io.abort_with_error(f"Error loading configuration from {path}: {e}")

+        self._validate_schema_version()
+
         try:
             self._validate_platform_config()
         except SchemaError as e:

@@ -58,13 +78,90 @@

         return self.config

+    def _abort_due_to_schema_version_error(self, config_description: str, action_required: str):
+        self.io.abort_with_error(
+            "\n".join(
+                [
+                    config_description,
+                    "",
+                    action_required,
+                ]
+            )
+        )
+
+    def _validate_schema_version(self):
+        config_schema_version = self.config.get("schema_version")
+        config_platform_helper_version = self.config.get("default_versions", {}).get(
+            "platform-helper", ""
+        )
+        header = SCHEMA_VERSION_MESSAGE.format(
+            installed_platform_helper_version=self._installed_platform_helper_version(),
+            installed_schema_version=self.schema_version_for_installed_platform_helper,
+            config_platform_helper_version=(
+                config_platform_helper_version if config_platform_helper_version else "N/A"
+            ),
+            config_schema_version=(config_schema_version if config_schema_version else "N/A"),
+        )
+
+        if config_schema_version:
+            self._handle_schema_version_mismatch(config_schema_version, header)
+        else:
+            self._handle_missing_schema_version(config_platform_helper_version, header)
+
+    def _handle_schema_version_mismatch(self, platform_config_schema_version: int, header: str):
+        platform_config_schema_version_is_old = (
+            platform_config_schema_version < self.schema_version_for_installed_platform_helper
+        )
+        installed_platform_helper_is_old = (
+            platform_config_schema_version > self.schema_version_for_installed_platform_helper
+        )
+
+        if platform_config_schema_version_is_old:
+            self._abort_due_to_schema_version_error(
+                header,
+                "Please upgrade your platform-config.yml by running 'platform-helper config migrate'.",
+            )
+        elif installed_platform_helper_is_old:
+            self._abort_due_to_schema_version_error(
+                header,
+                f"Please update your platform-helper to a version that supports schema_version: {platform_config_schema_version}.",
+            )
+        # else the schema_version is the correct one so continue.
+
+    def _handle_missing_schema_version(self, config_platform_helper_version: str, header: str):
+        config_p_h_version_semver = SemanticVersion.from_string(config_platform_helper_version)
+        major_version = config_p_h_version_semver and config_p_h_version_semver.major
+        platform_config_is_old_but_supported_by_migrations = (
+            major_version and major_version == FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION
+        )
+        platform_config_is_old = (
+            major_version and major_version < FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION
+        )
+        platform_config_is_really_old = not major_version
+        installed_platform_helper_version = self._installed_platform_helper_version()
+
+        if platform_config_is_old_but_supported_by_migrations:
+            self._abort_due_to_schema_version_error(
+                header,
+                f"Please upgrade your platform-config.yml to be compatible with {installed_platform_helper_version} by running: 'platform-helper config migrate'.",
+            )
+        elif platform_config_is_old or platform_config_is_really_old:
+            self._abort_due_to_schema_version_error(
+                header,
+                PLEASE_UPGRADE_TO_V13_MESSAGE.format(
+                    installed_platform_helper_version=installed_platform_helper_version,
+                ),
+            )
+        # if major_version and major_version > FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION then
+        # the platform-config.yml is malformed and so should progress to validation if appropriate.
+
     def load_unvalidated_config_file(self, path=PLATFORM_CONFIG_FILE):
         try:
             return self.file_provider.load(path)
         except FileProviderException:
             return {}

-    # TODO remove function and push logic to where this is called.
+    # TODO: DBTP-1888: remove function and push logic to where this is called.
     # removed usage from config domain, code is very generic and doesn't require the overhead of a function
     def config_file_check(self, path=PLATFORM_CONFIG_FILE):
         if not Path(path).exists():

@@ -86,17 +183,12 @@ class ConfigProvider:
             name: data if data else {} for name, data in environments.items() if name != "*"
         }

-
+        config.get("default_versions", {})

         def combine_env_data(data):
             return {
                 **env_defaults,
                 **data,
-                "versions": {
-                    **default_versions,
-                    **env_defaults.get("versions", {}),
-                    **data.get("versions", {}),
-                },
             }

         defaulted_envs = {

@@ -107,3 +199,13 @@
         enriched_config["environments"] = defaulted_envs

         return enriched_config
+
+    def write_platform_config(self, new_platform_config):
+        current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        message = f"# Generated by platform-helper {self._installed_platform_helper_version()} / {current_date}.\n\n"
+        self.file_provider.write(PLATFORM_CONFIG_FILE, new_platform_config, message)
+
+    def _installed_platform_helper_version(self) -> str:
+        return str(
+            self.installed_version_provider.get_semantic_version(PLATFORM_HELPER_PACKAGE_NAME)
+        )
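The net effect of the new `_validate_schema_version` gate can be summarised outside the package. The sketch below is a hypothetical, standalone mirror of the decision rules visible in the hunks above, not the package's own API; the installed schema version of 1 is an assumption (the only migration shipped in this release is v0 to v1).

```python
# Hypothetical mirror of ConfigProvider._validate_schema_version's decision rules.
# Assumes PLATFORM_CONFIG_SCHEMA_VERSION == 1; the real constant lives in
# dbt_platform_helper/constants.py and may differ.
INSTALLED_SCHEMA_VERSION = 1


def schema_version_advice(config: dict) -> str:
    """Return the action the config provider would ask the user to take."""
    config_schema_version = config.get("schema_version")
    if config_schema_version is None:
        # Pre-14 configs carry no schema_version; 13.x configs can be migrated,
        # anything older must first be upgraded to platform-helper 13.
        return "run 'platform-helper config migrate' (upgrade to 13 first if older)"
    if config_schema_version < INSTALLED_SCHEMA_VERSION:
        return "run 'platform-helper config migrate'"
    if config_schema_version > INSTALLED_SCHEMA_VERSION:
        return "upgrade platform-helper"
    return "schema versions match; continue to validation"


print(schema_version_advice({"schema_version": 1}))  # schema versions match; continue to validation
print(schema_version_advice({}))                     # run 'platform-helper config migrate' (...)
```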
dbt_platform_helper/providers/config_validator.py

@@ -89,16 +89,16 @@ class ConfigValidator:
         return self._validate_extension_supported_versions(
             config=config,
             aws_provider=Redis(self._get_client("elasticache")),
-            extension_type="redis",  # TODO this is information which can live in the RedisProvider
-            version_key="engine",  # TODO this is information which can live in the RedisProvider
+            extension_type="redis",  # TODO: DBTP-1888: this is information which can live in the RedisProvider
+            version_key="engine",  # TODO: DBTP-1888: this is information which can live in the RedisProvider
         )

     def validate_supported_opensearch_versions(self, config):
         return self._validate_extension_supported_versions(
             config=config,
             aws_provider=Opensearch(self._get_client("opensearch")),
-            extension_type="opensearch",  # TODO this is information which can live in the OpensearchProvider
-            version_key="engine",  # TODO this is information which can live in the OpensearchProvider
+            extension_type="opensearch",  # TODO: DBTP-1888: this is information which can live in the OpensearchProvider
+            version_key="engine",  # TODO: DBTP-1888: this is information which can live in the OpensearchProvider
         )

     def validate_environment_pipelines(self, config):

@@ -180,21 +180,6 @@
                 from_env = section["from"]
                 to_env = section["to"]

-                from_account = (
-                    config.get("environments", {})
-                    .get(from_env, {})
-                    .get("accounts", {})
-                    .get("deploy", {})
-                    .get("id")
-                )
-                to_account = (
-                    config.get("environments", {})
-                    .get(to_env, {})
-                    .get("accounts", {})
-                    .get("deploy", {})
-                    .get("id")
-                )
-
                 if from_env == to_env:
                     errors.append(
                         f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."

@@ -215,18 +200,6 @@
                         f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
                     )

-                # TODO - The from_account and to_account properties are deprecated and will be removed when terraform-platform-modules is merged with platform-tools
-                if from_account != to_account:
-                    if "from_account" in section and section["from_account"] != from_account:
-                        errors.append(
-                            f"Incorrect value for 'from_account' for environment '{from_env}'"
-                        )
-
-                    if "to_account" in section and section["to_account"] != to_account:
-                        errors.append(
-                            f"Incorrect value for 'to_account' for environment '{to_env}'"
-                        )
-
         if errors:
             raise ConfigValidatorError("\n".join(errors))

dbt_platform_helper/providers/copilot.py

@@ -53,7 +53,7 @@ def create_addon_client_task(
     # We cannot check for botocore.errorfactory.NoSuchEntityException as botocore generates that class on the fly as part of errorfactory.
     # factory. Checking the error code is the recommended way of handling these exceptions.
     if ex.response.get("Error", {}).get("Code", None) != "NoSuchEntity":
-        # TODO When we are refactoring this, raise an exception to be caught at the command layer
+        # TODO: DBTP-1946: When we are refactoring this, raise an exception to be caught at the command layer
         abort_with_error(
             f"cannot obtain Role {role_name}: {ex.response.get('Error', {}).get('Message', '')}"
         )

@@ -132,7 +132,7 @@ def connect_to_addon_client_task(
     tries = 0
     while tries < 15 and not running:
         tries += 1
-        #
+        # TODO: DBTP-1946: Use from ECS provider when we refactor this
        if get_ecs_task_arns(ecs_client, cluster_arn, task_name):
            subprocess.call(
                "copilot task exec "

@@ -154,7 +154,7 @@ def _normalise_secret_name(addon_name: str) -> str:


 def _get_secrets_provider(application: Application, env: str) -> Secrets:
-    #
+    # TODO: DBTP-1946: We instantiate the secrets provider here to avoid rabbit holing, but something better probably possible when we are refactoring this area
     return Secrets(
         application.environments[env].session.client("ssm"),
         application.environments[env].session.client("secretsmanager"),
dbt_platform_helper/providers/io.py

@@ -29,7 +29,7 @@ class ClickIOProvider:
         click.secho(f"Error: {message}", err=True, fg="red")
         exit(1)

-    # TODO messages will be a ValidationMessages class rather than a free-rein dictionary
+    # TODO: DBTP-1979: messages will be a ValidationMessages class rather than a free-rein dictionary
     def process_messages(self, messages: dict):
         if not messages:
             return
dbt_platform_helper/providers/load_balancers.py

@@ -48,7 +48,7 @@ class LoadBalancerProvider:
             ResourceTypeFilters=[
                 "elasticloadbalancing:targetgroup",
             ],
-        )  # TODO should be paginated
+        )  # TODO: DBTP-1942: should be paginated
         for resource in response["ResourceTagMappingList"]:
             tags = {tag["Key"]: tag["Value"] for tag in resource["Tags"]}

@@ -73,7 +73,7 @@ class LoadBalancerProvider:
         listener_arn = self.get_https_listener_for_application(app, env)
         certificates = self.evlb_client.describe_listener_certificates(ListenerArn=listener_arn)[
             "Certificates"
-        ]  # TODO should be paginated
+        ]  # TODO: DBTP-1942: should be paginated

         try:
             certificate_arn = next(c["CertificateArn"] for c in certificates if c["IsDefault"])

@@ -87,7 +87,7 @@ class LoadBalancerProvider:

         listeners = self.evlb_client.describe_listeners(LoadBalancerArn=load_balancer_arn)[
             "Listeners"
-        ]  # TODO should be paginated
+        ]  # TODO: DBTP-1942: should be paginated

         listener_arn = None

@@ -114,7 +114,7 @@ class LoadBalancerProvider:

         for lb in tag_descriptions:
             tags = {t["Key"]: t["Value"] for t in lb["Tags"]}
-            # TODO copilot hangover, creates coupling to specific tags could update to check application and environment
+            # TODO: DBTP-1967: copilot hangover, creates coupling to specific tags could update to check application and environment
             if tags.get("copilot-application") == app and tags.get("copilot-environment") == env:
                 return lb["ResourceArn"]

@@ -123,7 +123,7 @@ class LoadBalancerProvider:
     def get_host_header_conditions(self, listener_arn: str, target_group_arn: str) -> list:
         rules = self.evlb_client.describe_rules(ListenerArn=listener_arn)[
             "Rules"
-        ]  # TODO should be paginated
+        ]  # TODO: DBTP-1942: should be paginated

         conditions = []

@@ -152,7 +152,7 @@ class LoadBalancerProvider:
     def get_rules_tag_descriptions_by_listener_arn(self, listener_arn: str) -> list:
         rules = self.evlb_client.describe_rules(ListenerArn=listener_arn)[
             "Rules"
-        ]  # TODO should be paginated
+        ]  # TODO: DBTP-1942: should be paginated
         return self.get_rules_tag_descriptions(rules)

     def get_rules_tag_descriptions(self, rules: list) -> list:
dbt_platform_helper/providers/platform_config_schema.py

@@ -8,15 +8,18 @@ from schema import Regex
 from schema import Schema
 from schema import SchemaError

+from dbt_platform_helper.constants import PLATFORM_CONFIG_SCHEMA_VERSION
+

 class PlatformConfigSchema:
     @staticmethod
     def schema() -> Schema:
         return Schema(
             {
+                "schema_version": PLATFORM_CONFIG_SCHEMA_VERSION,
                 "application": str,
                 Optional("deploy_repository"): str,
-
+                "default_versions": PlatformConfigSchema.__default_versions_schema(),
                 Optional("environments"): PlatformConfigSchema.__environments_schema(),
                 Optional("codebase_pipelines"): PlatformConfigSchema.__codebase_pipelines_schema(),
                 Optional(

@@ -55,7 +58,7 @@
                     "subscription-filter": PlatformConfigSchema.__no_configuration_required_schema(
                         "subscription-filter"
                     ),
-                    #
+                    # TODO: DBTP-1943: The next three are no longer relevant. Remove them.
                     "monitoring": Schema(PlatformConfigSchema.__monitoring_schema()),
                     "vpc": PlatformConfigSchema.__no_configuration_required_schema("vpc"),
                     "xray": PlatformConfigSchema.__no_configuration_required_schema("xray"),

@@ -166,16 +169,11 @@
     @staticmethod
     def __default_versions_schema() -> dict:
         return {
-
-            Optional("platform-helper"): str,
+            "platform-helper": str,
         }

     @staticmethod
     def __environments_schema() -> dict:
-        _valid_environment_specific_version_overrides = {
-            Optional("terraform-platform-modules"): str,
-        }
-
         return {
             str: Or(
                 None,

@@ -190,9 +188,8 @@
                         "id": str,
                     },
                 },
-                #
+                # TODO: DBTP-1943: requires_approval is no longer relevant since we don't have AWS Copilot manage environment pipelines
                 Optional("requires_approval"): bool,
-                Optional("versions"): _valid_environment_specific_version_overrides,
                 Optional("vpc"): str,
             },
         )

@@ -250,7 +247,7 @@

     @staticmethod
     def __opensearch_schema() -> dict:
-        #
+        # TODO: DBTP-1943: Move to OpenSearch provider?
         _valid_opensearch_plans = Or(
             "tiny",
             "small",

@@ -288,7 +285,7 @@

     @staticmethod
     def __postgres_schema() -> dict:
-        #
+        # TODO: DBTP-1943: Move to Postgres provider?
         _valid_postgres_plans = Or(
             "tiny",
             "small",

@@ -311,14 +308,12 @@
             "4x-large-high-io",
         )

-        #
+        # TODO: DBTP-1943: Move to Postgres provider?
         _valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2")

         _valid_postgres_database_copy = {
             "from": PlatformConfigSchema.__valid_environment_name(),
             "to": PlatformConfigSchema.__valid_environment_name(),
-            Optional("from_account"): str,
-            Optional("to_account"): str,
             Optional("pipeline"): {Optional("schedule"): str},
         }

@@ -366,7 +361,7 @@

     @staticmethod
     def __redis_schema() -> dict:
-        #
+        # TODO: DBTP-1943: move to Redis provider?
         _valid_redis_plans = Or(
             "micro",
             "micro-ha",

@@ -400,7 +395,7 @@

     @staticmethod
     def valid_s3_bucket_name(name: str):
-        #
+        # TODO: DBTP-1943: This is a public method becasue that's what the test expect. Perhaps it belongs in an S3 provider?
         errors = []
         if not (2 < len(name) < 64):
             errors.append("Length must be between 3 and 63 characters inclusive.")

@@ -429,9 +424,9 @@
             errors.append(f"Names cannot be suffixed '{suffix}'.")

         if errors:
-            #
+            # TODO: DBTP-1943: Raise suitable PlatformException?
             raise SchemaError(
-                "Bucket name '{}' is invalid:\n
+                f"Bucket name '{name}' is invalid:\n" + "\n".join(f" {e}" for e in errors)
             )

         return True

@@ -557,10 +552,10 @@

     @staticmethod
     def string_matching_regex(regex_pattern: str) -> Callable:
-        #
+        # TODO: DBTP-1943: public for the unit tests, not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
         def validate(string):
             if not re.match(regex_pattern, string):
-                #
+                # TODO: DBTP-1943: Raise suitable PlatformException?
                 raise SchemaError(
                     f"String '{string}' does not match the required pattern '{regex_pattern}'."
                 )

@@ -570,11 +565,11 @@

     @staticmethod
     def is_integer_between(lower_limit, upper_limit) -> Callable:
-        #
+        # TODO: DBTP-1943: public for the unit tests, not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
         def validate(value):
             if isinstance(value, int) and lower_limit <= value <= upper_limit:
                 return True
-            #
+            # TODO: DBTP-1943: Raise suitable PlatformException?
             raise SchemaError(f"should be an integer between {lower_limit} and {upper_limit}")

         return validate

@@ -588,7 +583,7 @@

     @staticmethod
     def __valid_branch_name() -> Callable:
-        #
+        # TODO: DBTP-1943: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format
         return PlatformConfigSchema.string_matching_regex(r"^((?!\*).)*(\*)?$")

     @staticmethod

@@ -634,10 +629,10 @@


 class ConditionalOpensSearchSchema(Schema):
-    #
+    # TODO: DBTP-1943: Move to OpenSearch provider?
     _valid_opensearch_min_volume_size: int = 10

-    #
+    # TODO: DBTP-1943: Move to OpenSearch provider?
     _valid_opensearch_max_volume_size: dict = {
         "tiny": 100,
         "small": 200,

@@ -671,11 +666,11 @@ class ConditionalOpensSearchSchema(Schema):

         if volume_size:
             if not plan:
-                #
+                # TODO: DBTP-1943: Raise suitable PlatformException?
                 raise SchemaError(f"Missing key: 'plan'")

             if volume_size < self._valid_opensearch_min_volume_size:
-                #
+                # TODO: DBTP-1943: Raise suitable PlatformException?
                 raise SchemaError(
                     f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {self._valid_opensearch_min_volume_size}"
                 )

@@ -685,7 +680,7 @@ class ConditionalOpensSearchSchema(Schema):
                 plan == key
                 and not volume_size <= self._valid_opensearch_max_volume_size[key]
             ):
-                #
+                # TODO: DBTP-1943: Raise suitable PlatformException?
                 raise SchemaError(
                     f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {self._valid_opensearch_min_volume_size} and {self._valid_opensearch_max_volume_size[key]} for plan {plan}"
                 )
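Two schema changes above are worth calling out: `schema_version` is now a required top-level key that must equal the installed PLATFORM_CONFIG_SCHEMA_VERSION, and `default_versions` with a `platform-helper` entry is mandatory rather than optional. A minimal sketch with the `schema` library (already a dependency of this package) illustrates the behaviour; the reduced schema below and the value 1 for PLATFORM_CONFIG_SCHEMA_VERSION are assumptions for illustration only.

```python
from schema import Optional, Schema, SchemaError

PLATFORM_CONFIG_SCHEMA_VERSION = 1  # assumed value of the constant shipped in 14.0.0

# Hypothetical reduction of PlatformConfigSchema.schema() to the keys whose
# required/optional status changed in this release.
reduced_schema = Schema(
    {
        "schema_version": PLATFORM_CONFIG_SCHEMA_VERSION,  # required, must equal the installed schema version
        "application": str,
        "default_versions": {"platform-helper": str},  # "platform-helper" is now mandatory
        Optional("environments"): dict,  # stand-in for the full environments schema
    }
)

try:
    reduced_schema.validate(
        {"application": "demo", "default_versions": {"platform-helper": "14.0.0"}}
    )
except SchemaError as error:
    print(error)  # a config without schema_version is rejected
```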
dbt_platform_helper/providers/schema_migrations/__init__.py

File without changes
dbt_platform_helper/providers/schema_migrations/schema_v0_to_v1_migration.py

@@ -0,0 +1,43 @@
+from copy import deepcopy
+
+
+class SchemaV0ToV1Migration:
+    def from_version(self) -> int:
+        return 0
+
+    def migrate(self, platform_config: dict) -> dict:
+        migrated_config = deepcopy(platform_config)
+
+        self._remove_terraform_platform_modules_default_version(migrated_config)
+        self._remove_versions_from_env_config(migrated_config)
+        self._remove_to_account_and_from_account_from_database_copy(migrated_config)
+        self._remove_pipeline_platform_helper_override(migrated_config)
+
+        return migrated_config
+
+    def _remove_versions_from_env_config(self, migrated_config: dict) -> None:
+        for env_name, env in migrated_config.get("environments", {}).items():
+            if env and "versions" in env:
+                del env["versions"]
+
+    def _remove_terraform_platform_modules_default_version(self, migrated_config: dict) -> None:
+        if "default_versions" in migrated_config:
+            default_versions = migrated_config["default_versions"]
+            if "terraform-platform-modules" in default_versions:
+                del default_versions["terraform-platform-modules"]
+
+    def _remove_to_account_and_from_account_from_database_copy(self, migrated_config: dict) -> None:
+        for extension_name, extension in migrated_config.get("extensions", {}).items():
+            if extension.get("type") == "postgres" and "database_copy" in extension:
+                for database_copy_block in extension["database_copy"]:
+                    if "from_account" in database_copy_block:
+                        del database_copy_block["from_account"]
+                    if "to_account" in database_copy_block:
+                        del database_copy_block["to_account"]
+
+    def _remove_pipeline_platform_helper_override(self, migrated_config: dict) -> None:
+        for pipeline_name, pipeline_config in migrated_config.get(
+            "environment_pipelines", {}
+        ).items():
+            if "versions" in pipeline_config:
+                del pipeline_config["versions"]
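A usage sketch of the new migration step: running it over a 13.x-style config strips the keys the v1 schema no longer accepts. The application name, environment names, account ids and version numbers below are placeholders.

```python
from dbt_platform_helper.providers.schema_migrations.schema_v0_to_v1_migration import (
    SchemaV0ToV1Migration,
)

# A 13.x-era platform-config fragment using keys the v1 schema drops.
v0_config = {
    "application": "demo",
    "default_versions": {"platform-helper": "13.4.0", "terraform-platform-modules": "5.0.0"},
    "environments": {"dev": {"versions": {"terraform-platform-modules": "5.0.0"}}},
    "extensions": {
        "demo-db": {
            "type": "postgres",
            "database_copy": [
                {"from": "prod", "to": "dev", "from_account": "111", "to_account": "222"}
            ],
        }
    },
}

migrated = SchemaV0ToV1Migration().migrate(v0_config)
print(migrated["default_versions"])  # {'platform-helper': '13.4.0'}
print(migrated["environments"]["dev"])  # {} (per-environment version overrides removed)
print(migrated["extensions"]["demo-db"]["database_copy"])  # [{'from': 'prod', 'to': 'dev'}]
```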
dbt_platform_helper/providers/schema_migrator.py

@@ -0,0 +1,77 @@
+from collections import Counter
+from collections import OrderedDict
+from copy import deepcopy
+from typing import Protocol
+
+from dbt_platform_helper.platform_exception import PlatformException
+from dbt_platform_helper.providers.io import ClickIOProvider
+from dbt_platform_helper.providers.schema_migrations.schema_v0_to_v1_migration import (
+    SchemaV0ToV1Migration,
+)
+from dbt_platform_helper.providers.version import InstalledVersionProvider
+
+
+class InvalidMigrationConfigurationException(PlatformException):
+    pass
+
+
+class SchemaMigrationProtocol(Protocol):
+    def from_version(self) -> int: ...
+
+    def migrate(self, platform_config: dict) -> dict: ...
+
+
+# TODO: Possibly get this programmatically?
+ALL_MIGRATIONS = [SchemaV0ToV1Migration()]
+
+
+class Migrator:
+    def __init__(
+        self,
+        migrations: list[SchemaMigrationProtocol],
+        installed_version_provider: InstalledVersionProvider = InstalledVersionProvider,
+        io_provider: ClickIOProvider = ClickIOProvider(),
+    ):
+        self.migrations = sorted(migrations, key=lambda m: m.from_version())
+        self.installed_version_provider = installed_version_provider
+        self.io_provider = io_provider
+        from_version_counts = Counter([migration.from_version() for migration in self.migrations])
+        duplicate_from_versions = [count for count in from_version_counts.values() if count > 1]
+
+        if duplicate_from_versions:
+            raise InvalidMigrationConfigurationException(
+                "`from_version` parameters must be unique amongst migrations"
+            )
+
+    def migrate(self, platform_config: dict) -> dict:
+        out = OrderedDict(deepcopy(platform_config))
+        if "schema_version" not in out:
+            out["schema_version"] = 0
+
+        if "default_versions" in out:
+            out.move_to_end("default_versions", last=False)
+        if "schema_version" in out:
+            out.move_to_end("schema_version", last=False)
+        if "application" in out:
+            out.move_to_end("application", last=False)
+
+        for migration in self.migrations:
+            migration_can_be_applied = migration.from_version() == out["schema_version"]
+            if migration_can_be_applied:
+                out = migration.migrate(out)
+                schema_version = out["schema_version"]
+                self.io_provider.info(
+                    f"Migrating from platform config schema version {schema_version} to version {schema_version + 1}"
+                )
+                out["schema_version"] += 1
+
+        if "default_versions" not in out:
+            out["default_versions"] = {}
+
+        out["default_versions"]["platform-helper"] = str(
+            self.installed_version_provider.get_semantic_version("dbt-platform-helper")
+        )
+
+        self.io_provider.info("\nMigration complete")
+
+        return dict(out)
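Putting the two new modules together, `platform-helper config migrate` presumably drives something like the following. This is a minimal usage sketch assuming platform-helper 14.0.0 is installed, with "demo" as a placeholder application name.

```python
from dbt_platform_helper.providers.schema_migrator import ALL_MIGRATIONS, Migrator

migrator = Migrator(ALL_MIGRATIONS)
migrated = migrator.migrate({"application": "demo"})

# The migrator stamps the config with the new schema version and pins
# default_versions.platform-helper to the installed platform-helper release.
print(migrated["schema_version"])  # 1
print(migrated["default_versions"]["platform-helper"])  # e.g. "14.0.0"
```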