dbt-platform-helper 12.4.1__py3-none-any.whl → 12.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-platform-helper might be problematic; see the registry's advisory page for more details.

Files changed (38):
  1. dbt_platform_helper/COMMANDS.md +1 -6
  2. dbt_platform_helper/commands/config.py +2 -2
  3. dbt_platform_helper/commands/copilot.py +51 -30
  4. dbt_platform_helper/commands/environment.py +25 -185
  5. dbt_platform_helper/commands/pipeline.py +10 -173
  6. dbt_platform_helper/constants.py +10 -0
  7. dbt_platform_helper/domain/codebase.py +8 -4
  8. dbt_platform_helper/domain/config_validator.py +242 -0
  9. dbt_platform_helper/domain/copilot_environment.py +204 -0
  10. dbt_platform_helper/domain/database_copy.py +16 -12
  11. dbt_platform_helper/domain/maintenance_page.py +44 -20
  12. dbt_platform_helper/domain/pipelines.py +213 -0
  13. dbt_platform_helper/domain/terraform_environment.py +86 -0
  14. dbt_platform_helper/domain/test_platform_terraform_manifest_generator.py +100 -0
  15. dbt_platform_helper/jinja2_tags.py +1 -1
  16. dbt_platform_helper/providers/cache.py +14 -21
  17. dbt_platform_helper/providers/cloudformation.py +0 -1
  18. dbt_platform_helper/providers/config.py +100 -0
  19. dbt_platform_helper/providers/copilot.py +2 -0
  20. dbt_platform_helper/providers/files.py +26 -0
  21. dbt_platform_helper/providers/opensearch.py +36 -0
  22. dbt_platform_helper/providers/platform_config_schema.py +589 -527
  23. dbt_platform_helper/providers/redis.py +34 -0
  24. dbt_platform_helper/providers/vpc.py +57 -0
  25. dbt_platform_helper/providers/yaml_file.py +72 -0
  26. dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
  27. dbt_platform_helper/utils/application.py +32 -34
  28. dbt_platform_helper/utils/aws.py +1 -107
  29. dbt_platform_helper/utils/files.py +8 -59
  30. dbt_platform_helper/utils/platform_config.py +0 -7
  31. dbt_platform_helper/utils/template.py +10 -0
  32. dbt_platform_helper/utils/validation.py +5 -327
  33. dbt_platform_helper/utils/versioning.py +12 -0
  34. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/METADATA +2 -2
  35. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/RECORD +38 -26
  36. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/WHEEL +1 -1
  37. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/LICENSE +0 -0
  38. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/entry_points.txt +0 -0
@@ -8,6 +8,7 @@ DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION = "5"
8
8
  # Keys
9
9
  CODEBASE_PIPELINES_KEY = "codebase_pipelines"
10
10
  ENVIRONMENTS_KEY = "environments"
11
+ ENVIRONMENT_PIPELINES_KEY = "environment_pipelines"
11
12
 
12
13
  # Conduit
13
14
  CONDUIT_ADDON_TYPES = [
@@ -16,3 +17,12 @@ CONDUIT_ADDON_TYPES = [
16
17
  "redis",
17
18
  ]
18
19
  CONDUIT_DOCKER_IMAGE_LOCATION = "public.ecr.aws/uktrade/tunnel"
20
+ HYPHENATED_APPLICATION_NAME = "hyphenated-application-name"
21
+ ALPHANUMERIC_ENVIRONMENT_NAME = "alphanumericenvironmentname123"
22
+ ALPHANUMERIC_SERVICE_NAME = "alphanumericservicename123"
23
+ COPILOT_IDENTIFIER = "c0PIlotiD3ntIF3r"
24
+ CLUSTER_NAME_SUFFIX = f"Cluster-{COPILOT_IDENTIFIER}"
25
+ SERVICE_NAME_SUFFIX = f"Service-{COPILOT_IDENTIFIER}"
26
+ REFRESH_TOKEN_MESSAGE = (
27
+ "To refresh this SSO session run `aws sso login` with the corresponding profile"
28
+ )
@@ -10,6 +10,7 @@ import yaml
10
10
  from boto3 import Session
11
11
 
12
12
  from dbt_platform_helper.platform_exception import PlatformException
13
+ from dbt_platform_helper.providers.files import FileProvider
13
14
  from dbt_platform_helper.utils.application import Application
14
15
  from dbt_platform_helper.utils.application import ApplicationException
15
16
  from dbt_platform_helper.utils.application import load_application
@@ -19,7 +20,6 @@ from dbt_platform_helper.utils.aws import get_aws_session_or_abort
19
20
  from dbt_platform_helper.utils.aws import get_build_url_from_arn
20
21
  from dbt_platform_helper.utils.aws import list_latest_images
21
22
  from dbt_platform_helper.utils.aws import start_build_extraction
22
- from dbt_platform_helper.utils.files import mkfile
23
23
  from dbt_platform_helper.utils.git import check_if_commit_exists
24
24
  from dbt_platform_helper.utils.template import setup_templates
25
25
 
@@ -92,7 +92,7 @@ class Codebase:
92
92
  repository=repository, builder_version=builder_version
93
93
  )
94
94
  self.echo(
95
- mkfile(
95
+ FileProvider.mkfile(
96
96
  Path("."), ".copilot/image_build_run.sh", image_build_run_contents, overwrite=True
97
97
  )
98
98
  )
@@ -100,13 +100,17 @@ class Codebase:
100
100
  image_build_run_file = Path(".copilot/image_build_run.sh")
101
101
  image_build_run_file.chmod(image_build_run_file.stat().st_mode | stat.S_IEXEC)
102
102
 
103
- self.echo(mkfile(Path("."), ".copilot/config.yml", config_contents, overwrite=True))
103
+ self.echo(
104
+ FileProvider.mkfile(Path("."), ".copilot/config.yml", config_contents, overwrite=True)
105
+ )
104
106
 
105
107
  for phase in ["build", "install", "post_build", "pre_build"]:
106
108
  phase_contents = templates.get_template(f".copilot/phases/{phase}.sh").render()
107
109
 
108
110
  self.echo(
109
- mkfile(Path("./.copilot"), f"phases/{phase}.sh", phase_contents, overwrite=True)
111
+ FileProvider.mkfile(
112
+ Path("./.copilot"), f"phases/{phase}.sh", phase_contents, overwrite=True
113
+ )
110
114
  )
111
115
 
112
116
  def build(self, app: str, codebase: str, commit: str):
@@ -0,0 +1,242 @@
1
+ from typing import Callable
2
+
3
+ import boto3
4
+ import click
5
+
6
+ from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
7
+ from dbt_platform_helper.constants import ENVIRONMENTS_KEY
8
+ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
9
+ from dbt_platform_helper.providers.opensearch import OpensearchProvider
10
+ from dbt_platform_helper.providers.redis import RedisProvider
11
+ from dbt_platform_helper.utils.messages import abort_with_error
12
+
13
+
14
+ class ConfigValidator:
15
+
16
+ def __init__(self, validations: Callable[[dict], None] = None):
17
+ self.validations = validations or [
18
+ self.validate_supported_redis_versions,
19
+ self.validate_supported_opensearch_versions,
20
+ self.validate_environment_pipelines,
21
+ self.validate_codebase_pipelines,
22
+ self.validate_environment_pipelines_triggers,
23
+ self.validate_database_copy_section,
24
+ ]
25
+
26
+ def run_validations(self, config: dict):
27
+ for validation in self.validations:
28
+ validation(config)
29
+
30
+ def _validate_extension_supported_versions(
31
+ self, config, extension_type, version_key, get_supported_versions
32
+ ):
33
+ extensions = config.get("extensions", {})
34
+ if not extensions:
35
+ return
36
+
37
+ extensions_for_type = [
38
+ extension
39
+ for extension in config.get("extensions", {}).values()
40
+ if extension.get("type") == extension_type
41
+ ]
42
+
43
+ supported_extension_versions = get_supported_versions()
44
+ extensions_with_invalid_version = []
45
+
46
+ for extension in extensions_for_type:
47
+
48
+ environments = extension.get("environments", {})
49
+
50
+ if not isinstance(environments, dict):
51
+ click.secho(
52
+ f"Error: {extension_type} extension definition is invalid type, expected dictionary",
53
+ fg="red",
54
+ )
55
+ continue
56
+ for environment, env_config in environments.items():
57
+
58
+ # An extension version doesn't need to be specified for all environments, provided one is specified under "*".
59
+ # So check if the version is set before checking if it's supported
60
+ extension_version = env_config.get(version_key)
61
+ if extension_version and extension_version not in supported_extension_versions:
62
+ extensions_with_invalid_version.append(
63
+ {"environment": environment, "version": extension_version}
64
+ )
65
+
66
+ for version_failure in extensions_with_invalid_version:
67
+ click.secho(
68
+ f"{extension_type} version for environment {version_failure['environment']} is not in the list of supported {extension_type} versions: {supported_extension_versions}. Provided Version: {version_failure['version']}",
69
+ fg="red",
70
+ )
71
+
72
+ def validate_supported_redis_versions(self, config):
73
+ return self._validate_extension_supported_versions(
74
+ config=config,
75
+ extension_type="redis",
76
+ version_key="engine",
77
+ get_supported_versions=RedisProvider(
78
+ boto3.client("elasticache")
79
+ ).get_supported_redis_versions,
80
+ )
81
+
82
+ def validate_supported_opensearch_versions(self, config):
83
+ return self._validate_extension_supported_versions(
84
+ config=config,
85
+ extension_type="opensearch",
86
+ version_key="engine",
87
+ get_supported_versions=OpensearchProvider(
88
+ boto3.client("opensearch")
89
+ ).get_supported_opensearch_versions,
90
+ )
91
+
92
+ def validate_environment_pipelines(self, config):
93
+ bad_pipelines = {}
94
+ for pipeline_name, pipeline in config.get("environment_pipelines", {}).items():
95
+ bad_envs = []
96
+ pipeline_account = pipeline.get("account", None)
97
+ if pipeline_account:
98
+ for env in pipeline.get("environments", {}).keys():
99
+ env_account = (
100
+ config.get("environments", {})
101
+ .get(env, {})
102
+ .get("accounts", {})
103
+ .get("deploy", {})
104
+ .get("name")
105
+ )
106
+ if not env_account == pipeline_account:
107
+ bad_envs.append(env)
108
+ if bad_envs:
109
+ bad_pipelines[pipeline_name] = {"account": pipeline_account, "bad_envs": bad_envs}
110
+ if bad_pipelines:
111
+ message = "The following pipelines are misconfigured:"
112
+ for pipeline, detail in bad_pipelines.items():
113
+ envs = detail["bad_envs"]
114
+ acc = detail["account"]
115
+ message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n"
116
+ abort_with_error(message)
117
+
118
+ def validate_codebase_pipelines(self, config):
119
+ if CODEBASE_PIPELINES_KEY in config:
120
+ for codebase in config[CODEBASE_PIPELINES_KEY]:
121
+ codebase_environments = []
122
+
123
+ for pipeline in codebase["pipelines"]:
124
+ codebase_environments += [e["name"] for e in pipeline[ENVIRONMENTS_KEY]]
125
+
126
+ unique_codebase_environments = sorted(list(set(codebase_environments)))
127
+
128
+ if sorted(codebase_environments) != sorted(unique_codebase_environments):
129
+ abort_with_error(
130
+ f"The {PLATFORM_CONFIG_FILE} file is invalid, each environment can only be "
131
+ "listed in a single pipeline per codebase"
132
+ )
133
+
134
+ def validate_environment_pipelines_triggers(self, config):
135
+ errors = []
136
+ pipelines_with_triggers = {
137
+ pipeline_name: pipeline
138
+ for pipeline_name, pipeline in config.get("environment_pipelines", {}).items()
139
+ if "pipeline_to_trigger" in pipeline
140
+ }
141
+
142
+ for pipeline_name, pipeline in pipelines_with_triggers.items():
143
+ pipeline_to_trigger = pipeline["pipeline_to_trigger"]
144
+ if pipeline_to_trigger not in config.get("environment_pipelines", {}):
145
+ message = f" '{pipeline_name}' - '{pipeline_to_trigger}' is not a valid target pipeline to trigger"
146
+
147
+ errors.append(message)
148
+ continue
149
+
150
+ if pipeline_to_trigger == pipeline_name:
151
+ message = f" '{pipeline_name}' - pipelines cannot trigger themselves"
152
+ errors.append(message)
153
+
154
+ if errors:
155
+ error_message = "The following pipelines are misconfigured: \n"
156
+ abort_with_error(error_message + "\n ".join(errors))
157
+
158
+ def validate_database_copy_section(self, config):
159
+ extensions = config.get("extensions", {})
160
+ if not extensions:
161
+ return
162
+
163
+ postgres_extensions = {
164
+ key: ext for key, ext in extensions.items() if ext.get("type", None) == "postgres"
165
+ }
166
+
167
+ if not postgres_extensions:
168
+ return
169
+
170
+ errors = []
171
+
172
+ for extension_name, extension in postgres_extensions.items():
173
+ database_copy_sections = extension.get("database_copy", [])
174
+
175
+ if not database_copy_sections:
176
+ return
177
+
178
+ all_environments = [
179
+ env for env in config.get("environments", {}).keys() if not env == "*"
180
+ ]
181
+ all_envs_string = ", ".join(all_environments)
182
+
183
+ for section in database_copy_sections:
184
+ from_env = section["from"]
185
+ to_env = section["to"]
186
+
187
+ from_account = (
188
+ config.get("environments", {})
189
+ .get(from_env, {})
190
+ .get("accounts", {})
191
+ .get("deploy", {})
192
+ .get("id")
193
+ )
194
+ to_account = (
195
+ config.get("environments", {})
196
+ .get(to_env, {})
197
+ .get("accounts", {})
198
+ .get("deploy", {})
199
+ .get("id")
200
+ )
201
+
202
+ if from_env == to_env:
203
+ errors.append(
204
+ f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."
205
+ )
206
+
207
+ if "prod" in to_env:
208
+ errors.append(
209
+ f"Copying to a prod environment is not supported: database_copy 'to' cannot be '{to_env}' in extension '{extension_name}'."
210
+ )
211
+
212
+ if from_env not in all_environments:
213
+ errors.append(
214
+ f"database_copy 'from' parameter must be a valid environment ({all_envs_string}) but was '{from_env}' in extension '{extension_name}'."
215
+ )
216
+
217
+ if to_env not in all_environments:
218
+ errors.append(
219
+ f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
220
+ )
221
+
222
+ if from_account != to_account:
223
+ if "from_account" not in section:
224
+ errors.append(
225
+ f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'from_account' parameter must be present."
226
+ )
227
+ elif section["from_account"] != from_account:
228
+ errors.append(
229
+ f"Incorrect value for 'from_account' for environment '{from_env}'"
230
+ )
231
+
232
+ if "to_account" not in section:
233
+ errors.append(
234
+ f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'to_account' parameter must be present."
235
+ )
236
+ elif section["to_account"] != to_account:
237
+ errors.append(
238
+ f"Incorrect value for 'to_account' for environment '{to_env}'"
239
+ )
240
+
241
+ if errors:
242
+ abort_with_error("\n".join(errors))
@@ -0,0 +1,204 @@
1
+ from collections import defaultdict
2
+ from pathlib import Path
3
+
4
+ import boto3
5
+ import click
6
+
7
+ from dbt_platform_helper.platform_exception import PlatformException
8
+ from dbt_platform_helper.providers.files import FileProvider
9
+ from dbt_platform_helper.providers.load_balancers import find_https_listener
10
+ from dbt_platform_helper.utils.aws import get_aws_session_or_abort
11
+ from dbt_platform_helper.utils.template import S3_CROSS_ACCOUNT_POLICY
12
+ from dbt_platform_helper.utils.template import camel_case
13
+ from dbt_platform_helper.utils.template import setup_templates
14
+
15
+
16
+ # TODO - move helper functions into suitable provider classes
17
def get_subnet_ids(session, vpc_id, environment_name):
    """Return (public_subnet_ids, private_subnet_ids) for the given VPC.

    Subnets are classified by their `subnet_type` tag. Aborts the command
    if the VPC has no subnets at all.
    """
    described = session.client("ec2").describe_subnets(
        Filters=[{"Name": "vpc-id", "Values": [vpc_id]}]
    )
    subnets = described["Subnets"]

    if not subnets:
        click.secho(f"No subnets found for VPC with id: {vpc_id}.", fg="red")
        raise click.Abort

    def _ids_tagged(subnet_type):
        # A subnet counts as this type only if it carries the exact tag pair.
        tag = {"Key": "subnet_type", "Value": subnet_type}
        return [subnet["SubnetId"] for subnet in subnets if tag in subnet["Tags"]]

    public_subnets = _ids_tagged("public")
    private_subnets = _ids_tagged("private")

    # This call (and the helper it invokes) can be removed when we stop using
    # AWS Copilot to deploy the services.
    public_subnets, private_subnets = _match_subnet_id_order_to_cloudformation_exports(
        session,
        environment_name,
        public_subnets,
        private_subnets,
    )

    return public_subnets, private_subnets
40
+
41
+
42
def _match_subnet_id_order_to_cloudformation_exports(
    session, environment_name, public_subnets, private_subnets
):
    """Reorder subnet ids to match this environment's CloudFormation exports.

    Copilot is sensitive to subnet ordering, so when the exported
    `*-PublicSubnets` / `*-PrivateSubnets` lists contain exactly the same ids
    (regardless of order), the exported ordering wins.
    """
    exported_public = []
    exported_private = []
    environment_marker = f"-{environment_name}-"

    paginator = session.client("cloudformation").get_paginator("list_exports")
    for page in paginator.paginate():
        for export in page["Exports"]:
            export_name = export["Name"]
            if environment_marker not in export_name:
                continue
            if export_name.endswith("-PublicSubnets"):
                exported_public = export["Value"].split(",")
            if export_name.endswith("-PrivateSubnets"):
                exported_private = export["Value"].split(",")

    # If the elements match, regardless of order, use the exported ordering.
    if set(public_subnets) == set(exported_public):
        public_subnets = exported_public
    if set(private_subnets) == set(exported_private):
        private_subnets = exported_private

    return public_subnets, private_subnets
62
+
63
+
64
def get_cert_arn(session, application, env_name):
    """Return the ARN of the HTTPS certificate for the given environment.

    Aborts the command with a red error message when no matching certificate
    can be found.
    """
    try:
        arn = find_https_certificate(session, application, env_name)
    # Fix: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. Any lookup failure still maps to the same abort.
    except Exception:
        click.secho(
            f"No certificate found with domain name matching environment {env_name}.", fg="red"
        )
        raise click.Abort

    return arn
74
+
75
+
76
def get_vpc_id(session, env_name, vpc_name=None):
    """Return the id of the VPC whose Name tag matches this environment.

    Tries `vpc_name` (defaulting to "<profile>-<env>") first, then falls back
    to a VPC named after the profile alone. Aborts if neither exists.
    """
    if not vpc_name:
        vpc_name = f"{session.profile_name}-{env_name}"

    def _vpcs_named(name):
        filters = [{"Name": "tag:Name", "Values": [name]}]
        return session.client("ec2").describe_vpcs(Filters=filters)["Vpcs"]

    vpcs = _vpcs_named(vpc_name)

    if not vpcs:
        # Fall back to the legacy naming scheme of profile name only.
        vpcs = _vpcs_named(session.profile_name)

    if not vpcs:
        click.secho(
            f"No VPC found with name {vpc_name} in AWS account {session.profile_name}.", fg="red"
        )
        raise click.Abort

    return vpcs[0]["VpcId"]
94
+
95
+
96
def _generate_copilot_environment_manifests(
    environment_name, application_name, env_config, session
):
    """Render and write copilot/environments/<env>/manifest.yml for one environment.

    Looks up the environment's VPC, subnets and HTTPS certificate via the
    given AWS session, renders the env/manifest.yml template with them, and
    writes the result, echoing the file-creation message.
    """
    manifest_template = setup_templates().get_template("env/manifest.yml")

    # env_config may name a specific VPC; otherwise fall back to the default.
    vpc_id = get_vpc_id(session, environment_name, env_config.get("vpc", None))
    public_subnet_ids, private_subnet_ids = get_subnet_ids(session, vpc_id, environment_name)
    certificate_arn = get_cert_arn(session, application_name, environment_name)

    rendered = manifest_template.render(
        {
            "name": environment_name,
            "vpc_id": vpc_id,
            "pub_subnet_ids": public_subnet_ids,
            "priv_subnet_ids": private_subnet_ids,
            "certificate_arn": certificate_arn,
        }
    )

    click.echo(
        FileProvider.mkfile(
            ".", f"copilot/environments/{environment_name}/manifest.yml", rendered, overwrite=True
        )
    )
118
+
119
+
120
def find_https_certificate(session: boto3.Session, app: str, env: str) -> str:
    """Return the ARN of the default certificate on the app's HTTPS listener.

    Raises:
        CertificateNotFoundException: if no certificate on the listener is
            marked as the default.
    """
    listener_arn = find_https_listener(session, app, env)
    response = session.client("elbv2").describe_listener_certificates(
        ListenerArn=listener_arn
    )

    default_certificate_arns = [
        certificate["CertificateArn"]
        for certificate in response["Certificates"]
        if certificate["IsDefault"]
    ]

    if not default_certificate_arns:
        raise CertificateNotFoundException()

    return default_certificate_arns[0]
133
+
134
+
135
class CertificateNotFoundException(PlatformException):
    """Raised when an environment's HTTPS listener has no default certificate."""

    pass
137
+
138
+
139
class CopilotEnvironment:
    """Generates AWS Copilot environment manifests from the platform config."""

    def __init__(self, config_provider):
        # Provider used to load, validate and enrich platform-config.yml.
        self.config_provider = config_provider

    def generate(self, environment_name):
        """Render the copilot manifest for a single named environment.

        Loads and validates the platform config, applies environment
        defaults, then writes the manifest using an AWS session for the
        environment's deploy account.
        """
        validated_config = self.config_provider.load_and_validate_platform_config()
        enriched_config = self.config_provider.apply_environment_defaults(validated_config)
        env_config = enriched_config["environments"][environment_name]

        deploy_profile = env_config.get("accounts", {}).get("deploy", {}).get("name")
        click.secho(f"Using {deploy_profile} for this AWS session")
        session = get_aws_session_or_abort(deploy_profile)

        _generate_copilot_environment_manifests(
            environment_name, enriched_config["application"], env_config, session
        )
155
+
156
+
157
class CopilotTemplating:
    """Renders Copilot addon templates (cross-account S3 access policies)."""

    def __init__(self, mkfile_fn=FileProvider.mkfile):
        # Injectable file writer so the class can be exercised without disk I/O.
        self.mkfile_fn = mkfile_fn

    def generate_cross_account_s3_policies(self, environments: dict, extensions):
        """Write an s3-cross-account-policy.yml addon per consuming service.

        Scans every extension's environments for
        `cross_environment_service_access` entries with read or write access,
        groups the resulting policy resources by service, and renders one
        addon file per service.
        """
        policy_resources_by_service = defaultdict(list)

        for extension in extensions.values():
            for bucket_env, env_settings in extension.get("environments", {}).items():
                if "cross_environment_service_access" not in env_settings:
                    continue
                bucket = env_settings.get("bucket_name")
                for access_name, access in env_settings[
                    "cross_environment_service_access"
                ].items():
                    service = access.get("service")
                    can_read = access.get("read", False)
                    can_write = access.get("write", False)
                    if not (can_read or can_write):
                        continue
                    bucket_account_id = (
                        environments.get(bucket_env, {})
                        .get("accounts", {})
                        .get("deploy", {})
                        .get("id")
                    )
                    policy_resources_by_service[service].append(
                        {
                            "bucket_name": bucket,
                            "app_prefix": camel_case(f"{service}-{bucket}-{access_name}"),
                            "bucket_env": bucket_env,
                            "access_env": access.get("environment"),
                            "bucket_account": bucket_account_id,
                            "read": can_read,
                            "write": can_write,
                        }
                    )

        if not policy_resources_by_service:
            click.echo("\n>>> No cross-environment S3 policies to create.\n")
            return

        templates = setup_templates()
        output_dir = Path(".").absolute()

        for service in sorted(policy_resources_by_service.keys()):
            click.echo(f"\n>>> Creating S3 cross account policies for {service}.\n")
            template = templates.get_template(S3_CROSS_ACCOUNT_POLICY)
            file_content = template.render(
                {"resources": policy_resources_by_service[service]}
            )
            file_path = f"copilot/{service}/addons/s3-cross-account-policy.yml"

            self.mkfile_fn(output_dir, file_path, file_content, True)
            click.echo(f"File {file_path} created")
@@ -7,18 +7,18 @@ import click
7
7
  from boto3 import Session
8
8
 
9
9
  from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
10
- from dbt_platform_helper.domain.maintenance_page import MaintenancePageProvider
10
+ from dbt_platform_helper.domain.config_validator import ConfigValidator
11
+ from dbt_platform_helper.domain.maintenance_page import MaintenancePage
11
12
  from dbt_platform_helper.providers.aws import AWSException
13
+ from dbt_platform_helper.providers.config import ConfigProvider
14
+ from dbt_platform_helper.providers.vpc import Vpc
15
+ from dbt_platform_helper.providers.vpc import VpcProvider
12
16
  from dbt_platform_helper.utils.application import Application
13
17
  from dbt_platform_helper.utils.application import ApplicationNotFoundException
14
18
  from dbt_platform_helper.utils.application import load_application
15
- from dbt_platform_helper.utils.aws import Vpc
16
19
  from dbt_platform_helper.utils.aws import get_connection_string
17
- from dbt_platform_helper.utils.aws import get_vpc_info_by_name
18
20
  from dbt_platform_helper.utils.aws import wait_for_log_group_to_exist
19
- from dbt_platform_helper.utils.files import apply_environment_defaults
20
21
  from dbt_platform_helper.utils.messages import abort_with_error
21
- from dbt_platform_helper.utils.validation import load_and_validate_platform_config
22
22
 
23
23
 
24
24
  class DatabaseCopy:
@@ -28,32 +28,35 @@ class DatabaseCopy:
28
28
  database: str,
29
29
  auto_approve: bool = False,
30
30
  load_application: Callable[[str], Application] = load_application,
31
- vpc_config: Callable[[Session, str, str, str], Vpc] = get_vpc_info_by_name,
31
+ # TODO We inject VpcProvider as a callable here so that it can be instantiated within the method. To be improved
32
+ vpc_provider: Callable[[Session], VpcProvider] = VpcProvider,
32
33
  db_connection_string: Callable[
33
34
  [Session, str, str, str, Callable], str
34
35
  ] = get_connection_string,
35
36
  maintenance_page_provider: Callable[
36
37
  [str, str, list[str], str, str], None
37
- ] = MaintenancePageProvider(),
38
+ ] = MaintenancePage(),
38
39
  input: Callable[[str], str] = click.prompt,
39
40
  echo: Callable[[str], str] = click.secho,
40
41
  abort: Callable[[str], None] = abort_with_error,
42
+ config_provider: ConfigProvider = ConfigProvider(ConfigValidator()),
41
43
  ):
42
44
  self.app = app
43
45
  self.database = database
44
46
  self.auto_approve = auto_approve
45
- self.vpc_config = vpc_config
47
+ self.vpc_provider = vpc_provider
46
48
  self.db_connection_string = db_connection_string
47
49
  self.maintenance_page_provider = maintenance_page_provider
48
50
  self.input = input
49
51
  self.echo = echo
50
52
  self.abort = abort
53
+ self.config_provider = config_provider
51
54
 
52
55
  if not self.app:
53
56
  if not Path(PLATFORM_CONFIG_FILE).exists():
54
57
  self.abort("You must either be in a deploy repo, or provide the --app option.")
55
58
 
56
- config = load_and_validate_platform_config()
59
+ config = self.config_provider.load_and_validate_platform_config()
57
60
  self.app = config["application"]
58
61
 
59
62
  try:
@@ -74,7 +77,8 @@ class DatabaseCopy:
74
77
  env_session = environment.session
75
78
 
76
79
  try:
77
- vpc_config = self.vpc_config(env_session, self.app, env, vpc_name)
80
+ vpc_provider = self.vpc_provider(env_session)
81
+ vpc_config = vpc_provider.get_vpc_info_by_name(self.app, env, vpc_name)
78
82
  except AWSException as ex:
79
83
  self.abort(str(ex))
80
84
 
@@ -110,8 +114,8 @@ class DatabaseCopy:
110
114
  if not vpc_name:
111
115
  if not Path(PLATFORM_CONFIG_FILE).exists():
112
116
  self.abort("You must either be in a deploy repo, or provide the vpc name option.")
113
- config = load_and_validate_platform_config()
114
- env_config = apply_environment_defaults(config)["environments"]
117
+ config = self.config_provider.load_and_validate_platform_config()
118
+ env_config = self.config_provider.apply_environment_defaults(config)["environments"]
115
119
  vpc_name = env_config.get(env, {}).get("vpc")
116
120
  return vpc_name
117
121