dbt-platform-helper 12.4.0-py3-none-any.whl → 12.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-platform-helper might be problematic.
- dbt_platform_helper/COMMANDS.md +0 -3
- dbt_platform_helper/commands/config.py +2 -2
- dbt_platform_helper/commands/copilot.py +47 -28
- dbt_platform_helper/commands/environment.py +16 -178
- dbt_platform_helper/commands/pipeline.py +5 -34
- dbt_platform_helper/constants.py +12 -1
- dbt_platform_helper/domain/config_validator.py +242 -0
- dbt_platform_helper/domain/copilot_environment.py +204 -0
- dbt_platform_helper/domain/database_copy.py +7 -5
- dbt_platform_helper/domain/maintenance_page.py +1 -1
- dbt_platform_helper/domain/terraform_environment.py +53 -0
- dbt_platform_helper/jinja2_tags.py +1 -1
- dbt_platform_helper/providers/cache.py +77 -0
- dbt_platform_helper/providers/cloudformation.py +0 -1
- dbt_platform_helper/providers/config.py +90 -0
- dbt_platform_helper/providers/opensearch.py +36 -0
- dbt_platform_helper/providers/platform_config_schema.py +667 -0
- dbt_platform_helper/providers/redis.py +34 -0
- dbt_platform_helper/providers/yaml_file.py +83 -0
- dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
- dbt_platform_helper/utils/aws.py +1 -59
- dbt_platform_helper/utils/files.py +0 -106
- dbt_platform_helper/utils/template.py +10 -0
- dbt_platform_helper/utils/validation.py +5 -889
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/METADATA +2 -2
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/RECORD +29 -22
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/WHEEL +1 -1
- dbt_platform_helper/templates/pipelines/environments/buildspec.yml +0 -80
- dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -48
- dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +0 -21
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/domain/config_validator.py (new file)
@@ -0,0 +1,242 @@
+from typing import Callable
+
+import boto3
+import click
+
+from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
+from dbt_platform_helper.constants import ENVIRONMENTS_KEY
+from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
+from dbt_platform_helper.providers.opensearch import OpensearchProvider
+from dbt_platform_helper.providers.redis import RedisProvider
+from dbt_platform_helper.utils.messages import abort_with_error
+
+
+class ConfigValidator:
+
+    def __init__(self, validations: Callable[[dict], None] = None):
+        self.validations = validations or [
+            self.validate_supported_redis_versions,
+            self.validate_supported_opensearch_versions,
+            self.validate_environment_pipelines,
+            self.validate_codebase_pipelines,
+            self.validate_environment_pipelines_triggers,
+            self.validate_database_copy_section,
+        ]
+
+    def run_validations(self, config: dict):
+        for validation in self.validations:
+            validation(config)
+
+    def _validate_extension_supported_versions(
+        self, config, extension_type, version_key, get_supported_versions
+    ):
+        extensions = config.get("extensions", {})
+        if not extensions:
+            return
+
+        extensions_for_type = [
+            extension
+            for extension in config.get("extensions", {}).values()
+            if extension.get("type") == extension_type
+        ]
+
+        supported_extension_versions = get_supported_versions()
+        extensions_with_invalid_version = []
+
+        for extension in extensions_for_type:
+
+            environments = extension.get("environments", {})
+
+            if not isinstance(environments, dict):
+                click.secho(
+                    f"Error: {extension_type} extension definition is invalid type, expected dictionary",
+                    fg="red",
+                )
+                continue
+            for environment, env_config in environments.items():
+
+                # An extension version doesn't need to be specified for all environments, provided one is specified under "*".
+                # So check if the version is set before checking if it's supported
+                extension_version = env_config.get(version_key)
+                if extension_version and extension_version not in supported_extension_versions:
+                    extensions_with_invalid_version.append(
+                        {"environment": environment, "version": extension_version}
+                    )
+
+        for version_failure in extensions_with_invalid_version:
+            click.secho(
+                f"{extension_type} version for environment {version_failure['environment']} is not in the list of supported {extension_type} versions: {supported_extension_versions}. Provided Version: {version_failure['version']}",
+                fg="red",
+            )
+
+    def validate_supported_redis_versions(self, config):
+        return self._validate_extension_supported_versions(
+            config=config,
+            extension_type="redis",
+            version_key="engine",
+            get_supported_versions=RedisProvider(
+                boto3.client("elasticache")
+            ).get_supported_redis_versions,
+        )
+
+    def validate_supported_opensearch_versions(self, config):
+        return self._validate_extension_supported_versions(
+            config=config,
+            extension_type="opensearch",
+            version_key="engine",
+            get_supported_versions=OpensearchProvider(
+                boto3.client("opensearch")
+            ).get_supported_opensearch_versions,
+        )
+
+    def validate_environment_pipelines(self, config):
+        bad_pipelines = {}
+        for pipeline_name, pipeline in config.get("environment_pipelines", {}).items():
+            bad_envs = []
+            pipeline_account = pipeline.get("account", None)
+            if pipeline_account:
+                for env in pipeline.get("environments", {}).keys():
+                    env_account = (
+                        config.get("environments", {})
+                        .get(env, {})
+                        .get("accounts", {})
+                        .get("deploy", {})
+                        .get("name")
+                    )
+                    if not env_account == pipeline_account:
+                        bad_envs.append(env)
+            if bad_envs:
+                bad_pipelines[pipeline_name] = {"account": pipeline_account, "bad_envs": bad_envs}
+        if bad_pipelines:
+            message = "The following pipelines are misconfigured:"
+            for pipeline, detail in bad_pipelines.items():
+                envs = detail["bad_envs"]
+                acc = detail["account"]
+                message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n"
+            abort_with_error(message)
+
+    def validate_codebase_pipelines(self, config):
+        if CODEBASE_PIPELINES_KEY in config:
+            for codebase in config[CODEBASE_PIPELINES_KEY]:
+                codebase_environments = []
+
+                for pipeline in codebase["pipelines"]:
+                    codebase_environments += [e["name"] for e in pipeline[ENVIRONMENTS_KEY]]
+
+                unique_codebase_environments = sorted(list(set(codebase_environments)))
+
+                if sorted(codebase_environments) != sorted(unique_codebase_environments):
+                    abort_with_error(
+                        f"The {PLATFORM_CONFIG_FILE} file is invalid, each environment can only be "
+                        "listed in a single pipeline per codebase"
+                    )
+
+    def validate_environment_pipelines_triggers(self, config):
+        errors = []
+        pipelines_with_triggers = {
+            pipeline_name: pipeline
+            for pipeline_name, pipeline in config.get("environment_pipelines", {}).items()
+            if "pipeline_to_trigger" in pipeline
+        }
+
+        for pipeline_name, pipeline in pipelines_with_triggers.items():
+            pipeline_to_trigger = pipeline["pipeline_to_trigger"]
+            if pipeline_to_trigger not in config.get("environment_pipelines", {}):
+                message = f" '{pipeline_name}' - '{pipeline_to_trigger}' is not a valid target pipeline to trigger"
+
+                errors.append(message)
+                continue
+
+            if pipeline_to_trigger == pipeline_name:
+                message = f" '{pipeline_name}' - pipelines cannot trigger themselves"
+                errors.append(message)
+
+        if errors:
+            error_message = "The following pipelines are misconfigured: \n"
+            abort_with_error(error_message + "\n ".join(errors))
+
+    def validate_database_copy_section(self, config):
+        extensions = config.get("extensions", {})
+        if not extensions:
+            return
+
+        postgres_extensions = {
+            key: ext for key, ext in extensions.items() if ext.get("type", None) == "postgres"
+        }
+
+        if not postgres_extensions:
+            return
+
+        errors = []
+
+        for extension_name, extension in postgres_extensions.items():
+            database_copy_sections = extension.get("database_copy", [])
+
+            if not database_copy_sections:
+                return
+
+            all_environments = [
+                env for env in config.get("environments", {}).keys() if not env == "*"
+            ]
+            all_envs_string = ", ".join(all_environments)
+
+            for section in database_copy_sections:
+                from_env = section["from"]
+                to_env = section["to"]
+
+                from_account = (
+                    config.get("environments", {})
+                    .get(from_env, {})
+                    .get("accounts", {})
+                    .get("deploy", {})
+                    .get("id")
+                )
+                to_account = (
+                    config.get("environments", {})
+                    .get(to_env, {})
+                    .get("accounts", {})
+                    .get("deploy", {})
+                    .get("id")
+                )
+
+                if from_env == to_env:
+                    errors.append(
+                        f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."
+                    )
+
+                if "prod" in to_env:
+                    errors.append(
+                        f"Copying to a prod environment is not supported: database_copy 'to' cannot be '{to_env}' in extension '{extension_name}'."
+                    )
+
+                if from_env not in all_environments:
+                    errors.append(
+                        f"database_copy 'from' parameter must be a valid environment ({all_envs_string}) but was '{from_env}' in extension '{extension_name}'."
+                    )
+
+                if to_env not in all_environments:
+                    errors.append(
+                        f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
+                    )
+
+                if from_account != to_account:
+                    if "from_account" not in section:
+                        errors.append(
+                            f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'from_account' parameter must be present."
+                        )
+                    elif section["from_account"] != from_account:
+                        errors.append(
+                            f"Incorrect value for 'from_account' for environment '{from_env}'"
+                        )
+
+                    if "to_account" not in section:
+                        errors.append(
+                            f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'to_account' parameter must be present."
+                        )
+                    elif section["to_account"] != to_account:
+                        errors.append(
+                            f"Incorrect value for 'to_account' for environment '{to_env}'"
+                        )
+
+        if errors:
+            abort_with_error("\n".join(errors))
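The validator's checks are injected as a list of callables, so a subset can be run in isolation. A minimal sketch (the config dict is hypothetical; the default list is avoided here because the Redis and OpenSearch checks call AWS):

    from dbt_platform_helper.domain.config_validator import ConfigValidator

    validator = ConfigValidator()
    # Swap in a single offline check; two of the default six hit AWS APIs.
    validator.validations = [validator.validate_environment_pipelines_triggers]

    config = {  # hypothetical platform-config.yml contents
        "environment_pipelines": {
            "main": {"account": "non-prod", "pipeline_to_trigger": "main"},
        },
    }
    validator.run_validations(config)  # aborts: pipelines cannot trigger themselves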
dbt_platform_helper/domain/copilot_environment.py (new file)
@@ -0,0 +1,204 @@
+from collections import defaultdict
+from pathlib import Path
+
+import boto3
+import click
+
+from dbt_platform_helper.platform_exception import PlatformException
+from dbt_platform_helper.providers.load_balancers import find_https_listener
+from dbt_platform_helper.utils.aws import get_aws_session_or_abort
+from dbt_platform_helper.utils.files import mkfile
+from dbt_platform_helper.utils.template import S3_CROSS_ACCOUNT_POLICY
+from dbt_platform_helper.utils.template import camel_case
+from dbt_platform_helper.utils.template import setup_templates
+
+
+# TODO - move helper functions into suitable provider classes
+def get_subnet_ids(session, vpc_id, environment_name):
+    subnets = session.client("ec2").describe_subnets(
+        Filters=[{"Name": "vpc-id", "Values": [vpc_id]}]
+    )["Subnets"]
+
+    if not subnets:
+        click.secho(f"No subnets found for VPC with id: {vpc_id}.", fg="red")
+        raise click.Abort
+
+    public_tag = {"Key": "subnet_type", "Value": "public"}
+    public_subnets = [subnet["SubnetId"] for subnet in subnets if public_tag in subnet["Tags"]]
+    private_tag = {"Key": "subnet_type", "Value": "private"}
+    private_subnets = [subnet["SubnetId"] for subnet in subnets if private_tag in subnet["Tags"]]
+
+    # This call and the method declaration can be removed when we stop using AWS Copilot to deploy the services
+    public_subnets, private_subnets = _match_subnet_id_order_to_cloudformation_exports(
+        session,
+        environment_name,
+        public_subnets,
+        private_subnets,
+    )
+
+    return public_subnets, private_subnets
+
+
+def _match_subnet_id_order_to_cloudformation_exports(
+    session, environment_name, public_subnets, private_subnets
+):
+    public_subnet_exports = []
+    private_subnet_exports = []
+    for page in session.client("cloudformation").get_paginator("list_exports").paginate():
+        for export in page["Exports"]:
+            if f"-{environment_name}-" in export["Name"]:
+                if export["Name"].endswith("-PublicSubnets"):
+                    public_subnet_exports = export["Value"].split(",")
+                if export["Name"].endswith("-PrivateSubnets"):
+                    private_subnet_exports = export["Value"].split(",")
+
+    # If the elements match, regardless of order, use the list from the CloudFormation exports
+    if set(public_subnets) == set(public_subnet_exports):
+        public_subnets = public_subnet_exports
+    if set(private_subnets) == set(private_subnet_exports):
+        private_subnets = private_subnet_exports
+
+    return public_subnets, private_subnets
+
+
+def get_cert_arn(session, application, env_name):
+    try:
+        arn = find_https_certificate(session, application, env_name)
+    except:
+        click.secho(
+            f"No certificate found with domain name matching environment {env_name}.", fg="red"
+        )
+        raise click.Abort
+
+    return arn
+
+
+def get_vpc_id(session, env_name, vpc_name=None):
+    if not vpc_name:
+        vpc_name = f"{session.profile_name}-{env_name}"
+
+    filters = [{"Name": "tag:Name", "Values": [vpc_name]}]
+    vpcs = session.client("ec2").describe_vpcs(Filters=filters)["Vpcs"]
+
+    if not vpcs:
+        filters[0]["Values"] = [session.profile_name]
+        vpcs = session.client("ec2").describe_vpcs(Filters=filters)["Vpcs"]
+
+    if not vpcs:
+        click.secho(
+            f"No VPC found with name {vpc_name} in AWS account {session.profile_name}.", fg="red"
+        )
+        raise click.Abort
+
+    return vpcs[0]["VpcId"]
+
+
+def _generate_copilot_environment_manifests(
+    environment_name, application_name, env_config, session
+):
+    env_template = setup_templates().get_template("env/manifest.yml")
+    vpc_name = env_config.get("vpc", None)
+    vpc_id = get_vpc_id(session, environment_name, vpc_name)
+    pub_subnet_ids, priv_subnet_ids = get_subnet_ids(session, vpc_id, environment_name)
+    cert_arn = get_cert_arn(session, application_name, environment_name)
+    contents = env_template.render(
+        {
+            "name": environment_name,
+            "vpc_id": vpc_id,
+            "pub_subnet_ids": pub_subnet_ids,
+            "priv_subnet_ids": priv_subnet_ids,
+            "certificate_arn": cert_arn,
+        }
+    )
+    click.echo(
+        mkfile(
+            ".", f"copilot/environments/{environment_name}/manifest.yml", contents, overwrite=True
+        )
+    )
+
+
+def find_https_certificate(session: boto3.Session, app: str, env: str) -> str:
+    listener_arn = find_https_listener(session, app, env)
+    cert_client = session.client("elbv2")
+    certificates = cert_client.describe_listener_certificates(ListenerArn=listener_arn)[
+        "Certificates"
+    ]
+
+    try:
+        certificate_arn = next(c["CertificateArn"] for c in certificates if c["IsDefault"])
+    except StopIteration:
+        raise CertificateNotFoundException()
+
+    return certificate_arn
+
+
+class CertificateNotFoundException(PlatformException):
+    pass
+
+
+class CopilotEnvironment:
+    def __init__(self, config_provider):
+        self.config_provider = config_provider
+
+    def generate(self, environment_name):
+        config = self.config_provider.load_and_validate_platform_config()
+        enriched_config = self.config_provider.apply_environment_defaults(config)
+
+        env_config = enriched_config["environments"][environment_name]
+        profile_for_environment = env_config.get("accounts", {}).get("deploy", {}).get("name")
+        click.secho(f"Using {profile_for_environment} for this AWS session")
+        session = get_aws_session_or_abort(profile_for_environment)
+
+        _generate_copilot_environment_manifests(
+            environment_name, enriched_config["application"], env_config, session
+        )
+
+
+class CopilotTemplating:
+    def __init__(self, mkfile_fn=mkfile):
+        self.mkfile_fn = mkfile_fn
+
+    def generate_cross_account_s3_policies(self, environments: dict, extensions):
+        resource_blocks = defaultdict(list)
+
+        for ext_name, ext_data in extensions.items():
+            for env_name, env_data in ext_data.get("environments", {}).items():
+                if "cross_environment_service_access" in env_data:
+                    bucket = env_data.get("bucket_name")
+                    x_env_data = env_data["cross_environment_service_access"]
+                    for access_name, access_data in x_env_data.items():
+                        service = access_data.get("service")
+                        read = access_data.get("read", False)
+                        write = access_data.get("write", False)
+                        if read or write:
+                            resource_blocks[service].append(
+                                {
+                                    "bucket_name": bucket,
+                                    "app_prefix": camel_case(f"{service}-{bucket}-{access_name}"),
+                                    "bucket_env": env_name,
+                                    "access_env": access_data.get("environment"),
+                                    "bucket_account": environments.get(env_name, {})
+                                    .get("accounts", {})
+                                    .get("deploy", {})
+                                    .get("id"),
+                                    "read": read,
+                                    "write": write,
+                                }
+                            )
+
+        if not resource_blocks:
+            click.echo("\n>>> No cross-environment S3 policies to create.\n")
+            return
+
+        templates = setup_templates()
+
+        for service in sorted(resource_blocks.keys()):
+            resources = resource_blocks[service]
+            click.echo(f"\n>>> Creating S3 cross account policies for {service}.\n")
+            template = templates.get_template(S3_CROSS_ACCOUNT_POLICY)
+            file_content = template.render({"resources": resources})
+            output_dir = Path(".").absolute()
+            file_path = f"copilot/{service}/addons/s3-cross-account-policy.yml"
+
+            self.mkfile_fn(output_dir, file_path, file_content, True)
+            click.echo(f"File {file_path} created")
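CopilotEnvironment.generate loads and validates platform-config.yml via an injected config provider, resolves the environment's deploy-account profile, and renders the Copilot environment manifest. A sketch of the expected wiring, assuming the ConfigProvider(ConfigValidator()) construction that appears in the database_copy.py hunk below (the environment name is illustrative, and real AWS credentials are required):

    from dbt_platform_helper.domain.config_validator import ConfigValidator
    from dbt_platform_helper.domain.copilot_environment import CopilotEnvironment
    from dbt_platform_helper.providers.config import ConfigProvider

    # Renders copilot/environments/dev/manifest.yml using the "dev"
    # environment's deploy-account profile from platform-config.yml.
    CopilotEnvironment(ConfigProvider(ConfigValidator())).generate("dev")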
dbt_platform_helper/domain/database_copy.py
@@ -7,8 +7,10 @@ import click
 from boto3 import Session
 
 from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
+from dbt_platform_helper.domain.config_validator import ConfigValidator
 from dbt_platform_helper.domain.maintenance_page import MaintenancePageProvider
 from dbt_platform_helper.providers.aws import AWSException
+from dbt_platform_helper.providers.config import ConfigProvider
 from dbt_platform_helper.utils.application import Application
 from dbt_platform_helper.utils.application import ApplicationNotFoundException
 from dbt_platform_helper.utils.application import load_application
@@ -16,9 +18,7 @@ from dbt_platform_helper.utils.aws import Vpc
 from dbt_platform_helper.utils.aws import get_connection_string
 from dbt_platform_helper.utils.aws import get_vpc_info_by_name
 from dbt_platform_helper.utils.aws import wait_for_log_group_to_exist
-from dbt_platform_helper.utils.files import apply_environment_defaults
 from dbt_platform_helper.utils.messages import abort_with_error
-from dbt_platform_helper.utils.validation import load_and_validate_platform_config
 
 
 class DatabaseCopy:
@@ -38,6 +38,7 @@ class DatabaseCopy:
         input: Callable[[str], str] = click.prompt,
         echo: Callable[[str], str] = click.secho,
         abort: Callable[[str], None] = abort_with_error,
+        config_provider: ConfigProvider = ConfigProvider(ConfigValidator()),
     ):
         self.app = app
         self.database = database
@@ -48,12 +49,13 @@ class DatabaseCopy:
         self.input = input
         self.echo = echo
         self.abort = abort
+        self.config_provider = config_provider
 
         if not self.app:
             if not Path(PLATFORM_CONFIG_FILE).exists():
                 self.abort("You must either be in a deploy repo, or provide the --app option.")
 
-            config = load_and_validate_platform_config()
+            config = self.config_provider.load_and_validate_platform_config()
             self.app = config["application"]
 
         try:
@@ -110,8 +112,8 @@ class DatabaseCopy:
         if not vpc_name:
             if not Path(PLATFORM_CONFIG_FILE).exists():
                 self.abort("You must either be in a deploy repo, or provide the vpc name option.")
-            config = load_and_validate_platform_config()
-            env_config = apply_environment_defaults(config)["environments"]
+            config = self.config_provider.load_and_validate_platform_config()
+            env_config = self.config_provider.apply_environment_defaults(config)["environments"]
             vpc_name = env_config.get(env, {}).get("vpc")
         return vpc_name
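Because the config provider is now a constructor default, tests can substitute a stub instead of reading platform-config.yml from disk. A hypothetical test-side sketch, stubbing only the two provider methods this hunk relies on (any DatabaseCopy constructor arguments not visible in this hunk are elided):

    from unittest.mock import MagicMock

    stub_provider = MagicMock()
    stub_provider.load_and_validate_platform_config.return_value = {"application": "demo"}
    stub_provider.apply_environment_defaults.return_value = {
        "environments": {"dev": {"vpc": "demo-dev"}}
    }
    # Passed as DatabaseCopy(..., config_provider=stub_provider), this avoids
    # any file or AWS access during unit tests.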
dbt_platform_helper/domain/maintenance_page.py
@@ -261,7 +261,7 @@ def add_maintenance_page(
     )
 
     click.secho(
-        f"\nUse a browser plugin to add `Bypass-Key` header with value {bypass_value} to your requests. For more detail, visit https://platform.readme.trade.gov.uk/",
+        f"\nUse a browser plugin to add `Bypass-Key` header with value {bypass_value} to your requests. For more detail, visit https://platform.readme.trade.gov.uk/next-steps/put-a-service-under-maintenance/",
         fg="green",
     )
dbt_platform_helper/domain/terraform_environment.py (new file)
@@ -0,0 +1,53 @@
+import click
+
+from dbt_platform_helper.constants import DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION
+from dbt_platform_helper.utils.files import mkfile
+from dbt_platform_helper.utils.template import setup_templates
+
+
+def _generate_terraform_environment_manifests(
+    application, env, env_config, cli_terraform_platform_modules_version
+):
+    env_template = setup_templates().get_template("environments/main.tf")
+
+    terraform_platform_modules_version = _determine_terraform_platform_modules_version(
+        env_config, cli_terraform_platform_modules_version
+    )
+
+    contents = env_template.render(
+        {
+            "application": application,
+            "environment": env,
+            "config": env_config,
+            "terraform_platform_modules_version": terraform_platform_modules_version,
+        }
+    )
+
+    click.echo(mkfile(".", f"terraform/environments/{env}/main.tf", contents, overwrite=True))
+
+
+def _determine_terraform_platform_modules_version(env_conf, cli_terraform_platform_modules_version):
+    cli_terraform_platform_modules_version = cli_terraform_platform_modules_version
+    env_conf_terraform_platform_modules_version = env_conf.get("versions", {}).get(
+        "terraform-platform-modules"
+    )
+    version_preference_order = [
+        cli_terraform_platform_modules_version,
+        env_conf_terraform_platform_modules_version,
+        DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION,
+    ]
+    return [version for version in version_preference_order if version][0]
+
+
+class TerraformEnvironment:
+    def __init__(self, config_provider):
+        self.config_provider = config_provider
+
+    def generate(self, name, terraform_platform_modules_version):
+        config = self.config_provider.load_and_validate_platform_config()
+        enriched_config = self.config_provider.apply_environment_defaults(config)
+
+        env_config = enriched_config["environments"][name]
+        _generate_terraform_environment_manifests(
+            config["application"], name, env_config, terraform_platform_modules_version
+        )
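_determine_terraform_platform_modules_version picks the first truthy value from the CLI flag, then the environment's versions block, then the package default. A worked sketch against the private function above (the version strings are illustrative):

    from dbt_platform_helper.domain.terraform_environment import (
        _determine_terraform_platform_modules_version,
    )

    env_conf = {"versions": {"terraform-platform-modules": "4.0.0"}}

    # A CLI-supplied version wins...
    assert _determine_terraform_platform_modules_version(env_conf, "5.1.0") == "5.1.0"
    # ...then the value from platform-config.yml...
    assert _determine_terraform_platform_modules_version(env_conf, None) == "4.0.0"
    # ...and DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION applies only when both are unset.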
dbt_platform_helper/providers/cache.py (new file)
@@ -0,0 +1,77 @@
+import os
+from datetime import datetime
+
+from dbt_platform_helper.providers.yaml_file import FileProvider
+from dbt_platform_helper.providers.yaml_file import YamlFileProvider
+
+
+class CacheProvider:
+    def __init__(
+        self,
+        file_provider: FileProvider = None,
+    ):
+        self._cache_file = ".platform-helper-config-cache.yml"
+        self.file_provider = file_provider or YamlFileProvider
+
+    def read_supported_versions_from_cache(self, resource_name):
+
+        platform_helper_config = self.file_provider.load(self._cache_file)
+
+        return platform_helper_config.get(resource_name).get("versions")
+
+    def update_cache(self, resource_name, supported_versions):
+
+        platform_helper_config = {}
+
+        if self.__cache_exists():
+            platform_helper_config = self.file_provider.load(self._cache_file)
+
+        cache_dict = {
+            resource_name: {
+                "versions": supported_versions,
+                "date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
+            }
+        }
+
+        platform_helper_config.update(cache_dict)
+
+        self.file_provider.write(
+            self._cache_file,
+            platform_helper_config,
+            "# [!] This file is autogenerated via the platform-helper. Do not edit.\n",
+        )
+
+    def cache_refresh_required(self, resource_name) -> bool:
+        """
+        Checks if the platform-helper should reach out to AWS to 'refresh' its
+        cached values.
+
+        An API call is needed if any of the following conditions are met:
+        1. No cache file (.platform-helper-config.yml) exists.
+        2. The resource name (e.g. redis, opensearch) does not exist within the cache file.
+        3. The date-retrieved value of the cached data is > than a time interval. In this case 1 day.
+        """
+
+        if not self.__cache_exists():
+            return True
+
+        platform_helper_config = self.file_provider.load(self._cache_file)
+
+        if platform_helper_config.get(resource_name):
+            return self.__check_if_cached_datetime_is_greater_than_interval(
+                platform_helper_config[resource_name].get("date-retrieved"), 1
+            )
+
+        return True
+
+    @staticmethod
+    def __check_if_cached_datetime_is_greater_than_interval(date_retrieved, interval_in_days):
+
+        current_datetime = datetime.now()
+        cached_datetime = datetime.strptime(date_retrieved, "%d-%m-%y %H:%M:%S")
+        delta = current_datetime - cached_datetime
+
+        return delta.days > interval_in_days
+
+    def __cache_exists(self):
+        return os.path.exists(self._cache_file)
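The provider supports a check-then-refresh pattern: consult the local cache first and only call AWS when cache_refresh_required returns True. The call site is not part of this diff, so the wiring below is an assumption based on the RedisProvider usage in config_validator.py:

    import boto3

    from dbt_platform_helper.providers.cache import CacheProvider
    from dbt_platform_helper.providers.redis import RedisProvider

    cache = CacheProvider()

    if cache.cache_refresh_required("redis"):
        # Cache file missing, no "redis" entry, or entry more than a day old.
        supported = RedisProvider(boto3.client("elasticache")).get_supported_redis_versions()
        cache.update_cache("redis", supported)
    else:
        supported = cache.read_supported_versions_from_cache("redis")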
dbt_platform_helper/providers/cloudformation.py
@@ -93,7 +93,6 @@ class CloudFormation:
         )
 
         params = []
-        # TODO Currently not covered by tests - see https://uktrade.atlassian.net/browse/DBTP-1582
         if "Parameters" in template_yml:
             for param in template_yml["Parameters"]:
                 params.append({"ParameterKey": param, "UsePreviousValue": True})