dbt-platform-helper 12.6.0__py3-none-any.whl → 13.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-platform-helper might be problematic. Click here for more details.

Files changed (35) hide show
  1. dbt_platform_helper/COMMANDS.md +7 -7
  2. dbt_platform_helper/commands/codebase.py +2 -2
  3. dbt_platform_helper/commands/notify.py +5 -3
  4. dbt_platform_helper/commands/pipeline.py +17 -8
  5. dbt_platform_helper/constants.py +3 -1
  6. dbt_platform_helper/domain/codebase.py +35 -16
  7. dbt_platform_helper/domain/config_validator.py +5 -27
  8. dbt_platform_helper/domain/maintenance_page.py +94 -57
  9. dbt_platform_helper/domain/pipelines.py +53 -127
  10. dbt_platform_helper/domain/terraform_environment.py +4 -0
  11. dbt_platform_helper/providers/aws.py +5 -0
  12. dbt_platform_helper/providers/config.py +2 -2
  13. dbt_platform_helper/providers/ecr.py +20 -0
  14. dbt_platform_helper/providers/files.py +1 -1
  15. dbt_platform_helper/providers/platform_config_schema.py +14 -15
  16. dbt_platform_helper/providers/terraform_manifest.py +120 -0
  17. dbt_platform_helper/templates/environment-pipelines/main.tf +2 -2
  18. dbt_platform_helper/templates/environments/main.tf +3 -4
  19. dbt_platform_helper/utils/aws.py +16 -5
  20. {dbt_platform_helper-12.6.0.dist-info → dbt_platform_helper-13.0.0.dist-info}/METADATA +1 -1
  21. {dbt_platform_helper-12.6.0.dist-info → dbt_platform_helper-13.0.0.dist-info}/RECORD +24 -33
  22. dbt_platform_helper/templates/pipelines/codebase/manifest.yml +0 -56
  23. dbt_platform_helper/templates/pipelines/codebase/overrides/.gitignore +0 -12
  24. dbt_platform_helper/templates/pipelines/codebase/overrides/bin/override.ts +0 -8
  25. dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.deploy.yml +0 -29
  26. dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.image.yml +0 -48
  27. dbt_platform_helper/templates/pipelines/codebase/overrides/cdk.json +0 -20
  28. dbt_platform_helper/templates/pipelines/codebase/overrides/package-lock.json +0 -4428
  29. dbt_platform_helper/templates/pipelines/codebase/overrides/package.json +0 -27
  30. dbt_platform_helper/templates/pipelines/codebase/overrides/stack.ts +0 -521
  31. dbt_platform_helper/templates/pipelines/codebase/overrides/tsconfig.json +0 -30
  32. dbt_platform_helper/templates/pipelines/codebase/overrides/types.ts +0 -52
  33. {dbt_platform_helper-12.6.0.dist-info → dbt_platform_helper-13.0.0.dist-info}/LICENSE +0 -0
  34. {dbt_platform_helper-12.6.0.dist-info → dbt_platform_helper-13.0.0.dist-info}/WHEEL +0 -0
  35. {dbt_platform_helper-12.6.0.dist-info → dbt_platform_helper-13.0.0.dist-info}/entry_points.txt +0 -0
@@ -5,14 +5,14 @@ from shutil import rmtree
5
5
 
6
6
  from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
7
7
  from dbt_platform_helper.constants import ENVIRONMENT_PIPELINES_KEY
8
- from dbt_platform_helper.constants import ENVIRONMENTS_KEY
8
+ from dbt_platform_helper.constants import SUPPORTED_AWS_PROVIDER_VERSION
9
+ from dbt_platform_helper.constants import SUPPORTED_TERRAFORM_VERSION
9
10
  from dbt_platform_helper.providers.config import ConfigProvider
11
+ from dbt_platform_helper.providers.ecr import ECRProvider
10
12
  from dbt_platform_helper.providers.files import FileProvider
11
13
  from dbt_platform_helper.providers.io import ClickIOProvider
14
+ from dbt_platform_helper.providers.terraform_manifest import TerraformManifestProvider
12
15
  from dbt_platform_helper.utils.application import get_application_name
13
- from dbt_platform_helper.utils.aws import get_account_details
14
- from dbt_platform_helper.utils.aws import get_public_repository_arn
15
- from dbt_platform_helper.utils.files import generate_override_files_from_template
16
16
  from dbt_platform_helper.utils.template import setup_templates
17
17
  from dbt_platform_helper.utils.versioning import (
18
18
  get_required_terraform_platform_modules_version,
@@ -23,30 +23,35 @@ class Pipelines:
23
23
  def __init__(
24
24
  self,
25
25
  config_provider: ConfigProvider,
26
+ terraform_manifest_provider: TerraformManifestProvider,
27
+ ecr_provider: ECRProvider,
26
28
  get_git_remote: Callable[[], str],
27
29
  get_codestar_arn: Callable[[str], str],
28
30
  io: ClickIOProvider = ClickIOProvider(),
31
+ file_provider: FileProvider = FileProvider(),
29
32
  ):
30
33
  self.config_provider = config_provider
31
34
  self.get_git_remote = get_git_remote
32
35
  self.get_codestar_arn = get_codestar_arn
36
+ self.terraform_manifest_provider = terraform_manifest_provider
37
+ self.ecr_provider = ecr_provider
33
38
  self.io = io
39
+ self.file_provider = file_provider
34
40
 
35
- def generate(self, terraform_platform_modules_version, deploy_branch):
36
- pipeline_config = self.config_provider.load_and_validate_platform_config()
41
+ def generate(self, cli_terraform_platform_modules_version: str, deploy_branch: str):
42
+ platform_config = self.config_provider.load_and_validate_platform_config()
37
43
 
38
- has_codebase_pipelines = CODEBASE_PIPELINES_KEY in pipeline_config
39
- has_environment_pipelines = ENVIRONMENT_PIPELINES_KEY in pipeline_config
44
+ has_codebase_pipelines = CODEBASE_PIPELINES_KEY in platform_config
45
+ has_environment_pipelines = ENVIRONMENT_PIPELINES_KEY in platform_config
40
46
 
41
47
  if not (has_codebase_pipelines or has_environment_pipelines):
42
48
  self.io.warn("No pipelines defined: nothing to do.")
43
49
  return
44
50
 
45
- platform_config_terraform_modules_default_version = pipeline_config.get(
51
+ platform_config_terraform_modules_default_version = platform_config.get(
46
52
  "default_versions", {}
47
53
  ).get("terraform-platform-modules", "")
48
54
 
49
- templates = setup_templates()
50
55
  app_name = get_application_name()
51
56
 
52
57
  git_repo = self.get_git_remote()
@@ -62,151 +67,72 @@ class Pipelines:
62
67
 
63
68
  self._clean_pipeline_config(copilot_pipelines_dir)
64
69
 
70
+ terraform_platform_modules_version = get_required_terraform_platform_modules_version(
71
+ cli_terraform_platform_modules_version,
72
+ platform_config_terraform_modules_default_version,
73
+ )
74
+
65
75
  if has_environment_pipelines:
66
- environment_pipelines = pipeline_config[ENVIRONMENT_PIPELINES_KEY]
76
+ environment_pipelines = platform_config[ENVIRONMENT_PIPELINES_KEY]
77
+ accounts = {
78
+ config.get("account")
79
+ for config in environment_pipelines.values()
80
+ if "account" in config
81
+ }
67
82
 
68
- for config in environment_pipelines.values():
69
- aws_account = config.get("account")
83
+ for account in accounts:
70
84
  self._generate_terraform_environment_pipeline_manifest(
71
- pipeline_config["application"],
72
- aws_account,
85
+ platform_config["application"],
86
+ account,
73
87
  terraform_platform_modules_version,
74
- platform_config_terraform_modules_default_version,
75
88
  deploy_branch,
76
89
  )
77
90
 
78
91
  if has_codebase_pipelines:
79
- account_id, _ = get_account_details()
80
-
81
- for codebase in pipeline_config[CODEBASE_PIPELINES_KEY]:
82
- self._generate_codebase_pipeline(
83
- account_id,
84
- app_name,
85
- codestar_connection_arn,
86
- git_repo,
87
- codebase,
88
- base_path,
89
- copilot_pipelines_dir,
90
- templates,
91
- )
92
+ codebase_pipelines = platform_config[CODEBASE_PIPELINES_KEY]
93
+ ecrs_to_be_managed = {
94
+ codebase: f"{platform_config['application']}/{codebase}"
95
+ for codebase in codebase_pipelines.keys()
96
+ }
97
+ ecrs_already_provisioned = set(self.ecr_provider.get_ecr_repo_names())
98
+ ecrs_that_need_importing = {
99
+ codebase: repo
100
+ for codebase, repo in ecrs_to_be_managed.items()
101
+ if repo in ecrs_already_provisioned
102
+ }
92
103
 
93
- def _clean_pipeline_config(self, pipelines_dir):
104
+ self.terraform_manifest_provider.generate_codebase_pipeline_config(
105
+ platform_config, terraform_platform_modules_version, ecrs_that_need_importing
106
+ )
107
+
108
+ def _clean_pipeline_config(self, pipelines_dir: Path):
94
109
  if pipelines_dir.exists():
95
110
  self.io.info("Deleting copilot/pipelines directory.")
96
111
  rmtree(pipelines_dir)
97
112
 
98
- def _generate_codebase_pipeline(
99
- self,
100
- account_id,
101
- app_name,
102
- codestar_connection_arn,
103
- git_repo,
104
- codebase,
105
- base_path,
106
- pipelines_dir,
107
- templates,
108
- ):
109
- makedirs(pipelines_dir / codebase["name"] / "overrides", exist_ok=True)
110
- environments = []
111
- for pipelines in codebase["pipelines"]:
112
- environments += pipelines[ENVIRONMENTS_KEY]
113
-
114
- additional_ecr = codebase.get("additional_ecr_repository", None)
115
- add_public_perms = additional_ecr and additional_ecr.startswith("public.ecr.aws")
116
- additional_ecr_arn = get_public_repository_arn(additional_ecr) if add_public_perms else None
117
-
118
- template_data = {
119
- "account_id": account_id,
120
- "app_name": app_name,
121
- "deploy_repo": git_repo,
122
- "codebase": codebase,
123
- ENVIRONMENTS_KEY: environments,
124
- "codestar_connection_arn": codestar_connection_arn,
125
- "codestar_connection_id": codestar_connection_arn.split("/")[-1],
126
- "additional_ecr_arn": additional_ecr_arn,
127
- }
128
-
129
- self._create_file_from_template(
130
- base_path,
131
- f"{codebase['name']}/manifest.yml",
132
- pipelines_dir,
133
- template_data,
134
- templates,
135
- "codebase/manifest.yml",
136
- )
137
-
138
- overrides_path = Path(__file__).parent.parent.joinpath(
139
- "templates/pipelines/codebase/overrides"
140
- )
141
- generate_override_files_from_template(
142
- base_path, overrides_path, pipelines_dir / codebase["name"] / "overrides", template_data
143
- )
144
-
145
- def _create_file_from_template(
146
- self, base_path, file_name, pipelines_dir, template_data, templates, template_name=None
147
- ):
148
- contents = templates.get_template(
149
- f"pipelines/{file_name if template_name is None else template_name}"
150
- ).render(template_data)
151
- message = FileProvider.mkfile(
152
- base_path, pipelines_dir / file_name, contents, overwrite=True
153
- )
154
- self.io.info(message)
155
-
156
113
  def _generate_terraform_environment_pipeline_manifest(
157
114
  self,
158
- application,
159
- aws_account,
160
- cli_terraform_platform_modules_version,
161
- platform_config_terraform_modules_default_version,
162
- deploy_branch,
115
+ application: str,
116
+ aws_account: str,
117
+ terraform_platform_modules_version: str,
118
+ deploy_branch: str,
163
119
  ):
164
120
  env_pipeline_template = setup_templates().get_template("environment-pipelines/main.tf")
165
121
 
166
- terraform_platform_modules_version = get_required_terraform_platform_modules_version(
167
- cli_terraform_platform_modules_version,
168
- platform_config_terraform_modules_default_version,
169
- )
170
-
171
122
  contents = env_pipeline_template.render(
172
123
  {
173
124
  "application": application,
174
125
  "aws_account": aws_account,
175
126
  "terraform_platform_modules_version": terraform_platform_modules_version,
176
127
  "deploy_branch": deploy_branch,
128
+ "terraform_version": SUPPORTED_TERRAFORM_VERSION,
129
+ "aws_provider_version": SUPPORTED_AWS_PROVIDER_VERSION,
177
130
  }
178
131
  )
179
132
 
180
133
  dir_path = f"terraform/environment-pipelines/{aws_account}"
181
134
  makedirs(dir_path, exist_ok=True)
182
135
 
183
- self.io.info(FileProvider.mkfile(".", f"{dir_path}/main.tf", contents, overwrite=True))
184
-
185
- def generate_terraform_codebase_pipeline_manifest(
186
- self,
187
- application,
188
- aws_account,
189
- cli_terraform_platform_modules_version,
190
- platform_config_terraform_modules_default_version,
191
- deploy_branch,
192
- ):
193
- env_pipeline_template = setup_templates().get_template("codebase-pipelines/main.tf")
194
-
195
- terraform_platform_modules_version = get_required_terraform_platform_modules_version(
196
- cli_terraform_platform_modules_version,
197
- platform_config_terraform_modules_default_version,
198
- )
199
-
200
- contents = env_pipeline_template.render(
201
- {
202
- "application": application,
203
- "aws_account": aws_account,
204
- "terraform_platform_modules_version": terraform_platform_modules_version,
205
- "deploy_branch": deploy_branch,
206
- }
136
+ self.io.info(
137
+ self.file_provider.mkfile(".", f"{dir_path}/main.tf", contents, overwrite=True)
207
138
  )
208
-
209
- dir_path = f"terraform/environment-pipelines/{aws_account}"
210
- makedirs(dir_path, exist_ok=True)
211
-
212
- self.io.info(FileProvider.mkfile(".", f"{dir_path}/main.tf", contents, overwrite=True))
@@ -1,6 +1,8 @@
1
1
  import click
2
2
 
3
3
  from dbt_platform_helper.constants import DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION
4
+ from dbt_platform_helper.constants import SUPPORTED_AWS_PROVIDER_VERSION
5
+ from dbt_platform_helper.constants import SUPPORTED_TERRAFORM_VERSION
4
6
  from dbt_platform_helper.platform_exception import PlatformException
5
7
  from dbt_platform_helper.providers.files import FileProvider
6
8
  from dbt_platform_helper.utils.template import setup_templates
@@ -31,6 +33,8 @@ class PlatformTerraformManifestGenerator:
31
33
  "environment": environment_name,
32
34
  "config": environment_config,
33
35
  "terraform_platform_modules_version": terraform_platform_modules_version,
36
+ "terraform_version": SUPPORTED_TERRAFORM_VERSION,
37
+ "aws_provider_version": SUPPORTED_AWS_PROVIDER_VERSION,
34
38
  }
35
39
  )
36
40
 
@@ -19,6 +19,11 @@ class ImageNotFoundException(AWSException):
19
19
  )
20
20
 
21
21
 
22
class RepositoryNotFoundException(AWSException):
    """Raised when a named ECR repository does not exist in the account/region."""

    def __init__(self, repository: str):
        # repository is the full ECR repo name, e.g. "<application>/<codebase>".
        super().__init__(f"""The ECR repository "{repository}" could not be found.""")
25
+
26
+
22
27
  class LogGroupNotFoundException(AWSException):
23
28
  def __init__(self, log_group_name: str):
24
29
  super().__init__(f"""No log group called "{log_group_name}".""")
@@ -28,7 +28,7 @@ class ConfigProvider:
28
28
  def get_enriched_config(self):
29
29
  return self.apply_environment_defaults(self.load_and_validate_platform_config())
30
30
 
31
- def validate_platform_config(self):
31
+ def _validate_platform_config(self):
32
32
  PlatformConfigSchema.schema().validate(self.config)
33
33
 
34
34
  # TODO= logically this isn't validation but loading + parsing, to move.
@@ -48,7 +48,7 @@ class ConfigProvider:
48
48
  self.io.abort_with_error(f"Error loading configuration from {path}: {e}")
49
49
 
50
50
  try:
51
- self.validate_platform_config()
51
+ self._validate_platform_config()
52
52
  except SchemaError as e:
53
53
  self.io.abort_with_error(f"Schema error in {path}. {e}")
54
54
 
@@ -0,0 +1,20 @@
1
+ from boto3 import Session
2
+
3
+ from dbt_platform_helper.utils.aws import get_aws_session_or_abort
4
+
5
+
6
class ECRProvider:
    """Thin wrapper around the AWS ECR API.

    The boto3 session is created lazily (via get_aws_session_or_abort) so the
    provider can be constructed without AWS credentials, e.g. in tests where a
    stub session is injected.
    """

    def __init__(self, session: "Session" = None):
        # String annotation keeps boto3 out of class-definition time; the
        # session may be injected for testing, otherwise created on first use.
        self.session = session
        self.client = None

    def _get_client(self):
        """Return an ECR client, creating the AWS session on first use."""
        if not self.session:
            self.session = get_aws_session_or_abort()
        return self.session.client("ecr")

    def get_ecr_repo_names(self) -> list[str]:
        """Return the names of all ECR repositories in the account/region.

        Uses the describe_repositories paginator so every page is read.
        """
        out = []
        for page in self._get_client().get_paginator("describe_repositories").paginate():
            # "repositories" is a list of repo descriptions; default to an
            # empty list (the previous {} default iterated as empty only by
            # accident of dict iteration).
            out.extend(repo["repositoryName"] for repo in page.get("repositories", []))
        return out
@@ -8,7 +8,7 @@ class FileProvider:
8
8
  pass
9
9
 
10
10
  @staticmethod
11
- def mkfile(base_path: str, file_path: str, contents, overwrite=False) -> str:
11
+ def mkfile(base_path: str, file_path: str, contents: str, overwrite=False) -> str:
12
12
  file_path = Path(file_path)
13
13
  file = Path(base_path).joinpath(file_path)
14
14
  file_exists = file.exists()
@@ -16,9 +16,7 @@ class PlatformConfigSchema:
16
16
  {
17
17
  # The following line is for the AWS Copilot version, will be removed under DBTP-1002
18
18
  "application": str,
19
- Optional("legacy_project", default=False): bool,
20
19
  Optional("default_versions"): PlatformConfigSchema.__default_versions_schema(),
21
- Optional("accounts"): list[str],
22
20
  Optional("environments"): PlatformConfigSchema.__environments_schema(),
23
21
  Optional("codebase_pipelines"): PlatformConfigSchema.__codebase_pipelines_schema(),
24
22
  Optional(
@@ -80,7 +78,7 @@ class PlatformConfigSchema:
80
78
  "cache": str,
81
79
  "request": str,
82
80
  },
83
- Optional("additional"): list[
81
+ Optional("additional"): [
84
82
  {
85
83
  "path": str,
86
84
  "cache": str,
@@ -94,12 +92,12 @@ class PlatformConfigSchema:
94
92
  Optional("environments"): {
95
93
  PlatformConfigSchema.__valid_environment_name(): Or(
96
94
  {
97
- Optional("additional_address_list"): list,
98
- Optional("allowed_methods"): list,
99
- Optional("cached_methods"): list,
95
+ Optional("additional_address_list"): [str],
96
+ Optional("allowed_methods"): [str],
97
+ Optional("cached_methods"): [str],
100
98
  Optional("cdn_compress"): bool,
101
99
  Optional("cdn_domains_list"): dict,
102
- Optional("cdn_geo_locations"): list,
100
+ Optional("cdn_geo_locations"): [str],
103
101
  Optional("cdn_geo_restriction_type"): str,
104
102
  Optional("cdn_logging_bucket"): str,
105
103
  Optional("cdn_logging_bucket_prefix"): str,
@@ -109,10 +107,10 @@ class PlatformConfigSchema:
109
107
  Optional("enable_logging"): bool,
110
108
  Optional("env_root"): str,
111
109
  Optional("forwarded_values_forward"): str,
112
- Optional("forwarded_values_headers"): list,
110
+ Optional("forwarded_values_headers"): [str],
113
111
  Optional("forwarded_values_query_string"): bool,
114
112
  Optional("origin_protocol_policy"): str,
115
- Optional("origin_ssl_protocols"): list,
113
+ Optional("origin_ssl_protocols"): [str],
116
114
  Optional("slack_alert_channel_alb_secret_rotation"): str,
117
115
  Optional("viewer_certificate_minimum_protocol_version"): str,
118
116
  Optional("viewer_certificate_ssl_support_method"): str,
@@ -127,14 +125,15 @@ class PlatformConfigSchema:
127
125
  }
128
126
 
129
127
  @staticmethod
130
- def __codebase_pipelines_schema() -> list[dict]:
131
- return [
132
- {
133
- "name": str,
128
+ def __codebase_pipelines_schema() -> dict:
129
+ return {
130
+ str: {
134
131
  "repository": str,
132
+ Optional("slack_channel"): str,
133
+ Optional("requires_image_build"): bool,
135
134
  Optional("additional_ecr_repository"): str,
136
135
  Optional("deploy_repository_branch"): str,
137
- "services": list[str],
136
+ "services": [{str: [str]}],
138
137
  "pipelines": [
139
138
  Or(
140
139
  {
@@ -160,7 +159,7 @@ class PlatformConfigSchema:
160
159
  ),
161
160
  ],
162
161
  },
163
- ]
162
+ }
164
163
 
165
164
  @staticmethod
166
165
  def __default_versions_schema() -> dict:
@@ -0,0 +1,120 @@
1
+ import json
2
+ from datetime import datetime
3
+ from importlib.metadata import version
4
+ from pathlib import Path
5
+ from typing import Callable
6
+
7
+ import click
8
+
9
+ from dbt_platform_helper.constants import SUPPORTED_AWS_PROVIDER_VERSION
10
+ from dbt_platform_helper.constants import SUPPORTED_TERRAFORM_VERSION
11
+ from dbt_platform_helper.providers.files import FileProvider
12
+
13
+
14
+ class TerraformManifestProvider:
15
+ def __init__(
16
+ self, file_provider: FileProvider = FileProvider(), echo: Callable[[str], None] = click.echo
17
+ ):
18
+ self.file_provider = file_provider
19
+ self.echo = echo
20
+
21
+ def generate_codebase_pipeline_config(
22
+ self,
23
+ platform_config: dict,
24
+ terraform_platform_modules_version: str,
25
+ ecr_imports: dict[str, str],
26
+ ):
27
+ default_account = (
28
+ platform_config.get("environments", {})
29
+ .get("*", {})
30
+ .get("accounts", {})
31
+ .get("deploy", {})
32
+ .get("name")
33
+ )
34
+ terraform = {}
35
+ self._add_header(terraform)
36
+ self._add_locals(terraform)
37
+ self._add_provider(terraform, default_account)
38
+ self._add_backend(terraform, platform_config, default_account)
39
+ self._add_codebase_pipeline_module(terraform, terraform_platform_modules_version)
40
+ self._add_imports(terraform, ecr_imports)
41
+
42
+ message = self.file_provider.mkfile(
43
+ str(Path(".").absolute()),
44
+ "terraform/codebase-pipelines/main.tf.json",
45
+ json.dumps(terraform, indent=2),
46
+ True,
47
+ )
48
+ self.echo(message)
49
+
50
+ @staticmethod
51
+ def _add_header(terraform: dict):
52
+ time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
53
+ version_header = f"Generated by platform-helper {version('dbt-platform-helper')} / {time}."
54
+ warning = "WARNING: This is an autogenerated file, not for manual editing."
55
+ # The "//" key denotes a comment in terraform json.
56
+ terraform["//"] = f"{version_header} {warning}"
57
+
58
+ @staticmethod
59
+ def _add_locals(terraform: dict):
60
+ terraform["locals"] = {
61
+ "platform_config": '${yamldecode(file("../../platform-config.yml"))}',
62
+ "application": '${local.platform_config["application"]}',
63
+ "all_codebases": '${local.platform_config["codebase_pipelines"]}',
64
+ "environments": '${local.platform_config["environments"]}',
65
+ }
66
+
67
+ @staticmethod
68
+ def _add_provider(terraform: dict, default_account: str):
69
+ terraform["provider"] = {"aws": {}}
70
+ terraform["provider"]["aws"]["region"] = "eu-west-2"
71
+ terraform["provider"]["aws"]["profile"] = default_account
72
+ terraform["provider"]["aws"]["alias"] = default_account
73
+ terraform["provider"]["aws"]["shared_credentials_files"] = ["~/.aws/config"]
74
+
75
+ @staticmethod
76
+ def _add_backend(terraform: dict, platform_config: dict, default_account: str):
77
+ terraform["terraform"] = {
78
+ "required_version": SUPPORTED_TERRAFORM_VERSION,
79
+ "backend": {
80
+ "s3": {
81
+ "bucket": f"terraform-platform-state-{default_account}",
82
+ "key": f"tfstate/application/{platform_config['application']}-codebase-pipelines.tfstate",
83
+ "region": "eu-west-2",
84
+ "encrypt": True,
85
+ "kms_key_id": f"alias/terraform-platform-state-s3-key-{default_account}",
86
+ "dynamodb_table": f"terraform-platform-lockdb-{default_account}",
87
+ }
88
+ },
89
+ "required_providers": {
90
+ "aws": {"source": "hashicorp/aws", "version": SUPPORTED_AWS_PROVIDER_VERSION}
91
+ },
92
+ }
93
+
94
+ @staticmethod
95
+ def _add_codebase_pipeline_module(terraform: dict, terraform_platform_modules_version: str):
96
+ source = f"git::https://github.com/uktrade/terraform-platform-modules.git//codebase-pipelines?depth=1&ref={terraform_platform_modules_version}"
97
+ terraform["module"] = {
98
+ "codebase-pipelines": {
99
+ "source": source,
100
+ "for_each": "${local.all_codebases}",
101
+ "application": "${local.application}",
102
+ "codebase": "${each.key}",
103
+ "repository": "${each.value.repository}",
104
+ "additional_ecr_repository": '${lookup(each.value, "additional_ecr_repository", null)}',
105
+ "pipelines": "${each.value.pipelines}",
106
+ "services": "${each.value.services}",
107
+ "requires_image_build": '${lookup(each.value, "requires_image_build", true)}',
108
+ "slack_channel": '${lookup(each.value, "slack_channel", "/codebuild/slack_oauth_channel")}',
109
+ "env_config": "${local.environments}",
110
+ }
111
+ }
112
+
113
+ @staticmethod
114
+ def _add_imports(terraform: dict, ecr_imports: dict[str, str]):
115
+ if ecr_imports:
116
+ terraform["import"] = {
117
+ "for_each": "${%s}" % json.dumps(ecr_imports),
118
+ "id": "${each.value}",
119
+ "to": "module.codebase-pipelines[each.key].aws_ecr_repository.this",
120
+ }
@@ -15,7 +15,7 @@ provider "aws" {
15
15
  }
16
16
 
17
17
  terraform {
18
- required_version = "~> 1.8"
18
+ required_version = "{{ terraform_version }}"
19
19
  backend "s3" {
20
20
  bucket = "terraform-platform-state-{{ aws_account }}"
21
21
  key = "tfstate/application/{{ application }}-pipelines.tfstate"
@@ -27,7 +27,7 @@ terraform {
27
27
  required_providers {
28
28
  aws = {
29
29
  source = "hashicorp/aws"
30
- version = "~> 5"
30
+ version = "{{ aws_provider_version }}"
31
31
  }
32
32
  }
33
33
  }
@@ -7,12 +7,12 @@ locals {
7
7
  args = {
8
8
  application = "{{ application }}"
9
9
  services = local.config["extensions"]
10
- dns_account_id = local.env_config["{{ environment }}"]["accounts"]["dns"]["id"]
10
+ env_config = local.env_config
11
11
  }
12
12
  }
13
13
 
14
14
  terraform {
15
- required_version = "~> 1.8"
15
+ required_version = "{{ terraform_version }}"
16
16
  backend "s3" {
17
17
  bucket = "terraform-platform-state-{{ config.accounts.deploy.name }}"
18
18
  key = "tfstate/application/{{ application }}-{{ environment }}.tfstate"
@@ -24,7 +24,7 @@ terraform {
24
24
  required_providers {
25
25
  aws = {
26
26
  source = "hashicorp/aws"
27
- version = "~> 5"
27
+ version = "{{ aws_provider_version }}"
28
28
  }
29
29
  }
30
30
  }
@@ -34,7 +34,6 @@ module "extensions" {
34
34
 
35
35
  args = local.args
36
36
  environment = "{{ environment }}"
37
- vpc_name = "{{ config.vpc }}"
38
37
  }
39
38
 
40
39
  /*
@@ -18,6 +18,7 @@ from dbt_platform_helper.platform_exception import PlatformException
18
18
  from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException
19
19
  from dbt_platform_helper.providers.aws import ImageNotFoundException
20
20
  from dbt_platform_helper.providers.aws import LogGroupNotFoundException
21
+ from dbt_platform_helper.providers.aws import RepositoryNotFoundException
21
22
  from dbt_platform_helper.providers.validation import ValidationException
22
23
 
23
24
  SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
@@ -381,6 +382,11 @@ def start_build_extraction(codebuild_client, build_options):
381
382
  return response["build"]["arn"]
382
383
 
383
384
 
385
def start_pipeline_and_return_execution_id(codepipeline_client, build_options):
    """Start a CodePipeline execution and return its execution id.

    build_options is expanded as keyword arguments for
    start_pipeline_execution (e.g. the pipeline name).
    """
    start_response = codepipeline_client.start_pipeline_execution(**build_options)
    return start_response["pipelineExecutionId"]
388
+
389
+
384
390
  # Todo: This should probably be in the AWS Copilot provider
385
391
  def check_codebase_exists(session: Session, application, codebase: str):
386
392
  try:
@@ -402,16 +408,16 @@ def check_codebase_exists(session: Session, application, codebase: str):
402
408
 
403
409
  def check_image_exists(session, application, codebase, commit):
404
410
  ecr_client = session.client("ecr")
411
+ repository = f"{application.name}/{codebase}"
405
412
  try:
406
413
  ecr_client.describe_images(
407
- repositoryName=f"{application.name}/{codebase}",
414
+ repositoryName=repository,
408
415
  imageIds=[{"imageTag": f"commit-{commit}"}],
409
416
  )
410
- except (
411
- ecr_client.exceptions.RepositoryNotFoundException,
412
- ecr_client.exceptions.ImageNotFoundException,
413
- ):
417
+ except ecr_client.exceptions.ImageNotFoundException:
414
418
  raise ImageNotFoundException(commit)
419
+ except ecr_client.exceptions.RepositoryNotFoundException:
420
+ raise RepositoryNotFoundException(repository)
415
421
 
416
422
 
417
423
  def get_build_url_from_arn(build_arn: str) -> str:
@@ -423,6 +429,11 @@ def get_build_url_from_arn(build_arn: str) -> str:
423
429
  )
424
430
 
425
431
 
432
def get_build_url_from_pipeline_execution_id(
    execution_id: str, pipeline_name: str, region: str = "eu-west-2"
) -> str:
    """Return the AWS console URL for a CodePipeline execution.

    Args:
        execution_id: Execution id returned by start_pipeline_execution.
        pipeline_name: Name of the CodePipeline pipeline.
        region: AWS region used in the console host; defaults to eu-west-2
            to preserve the previous hard-coded behaviour.
    """
    return f"https://{region}.console.aws.amazon.com/codesuite/codepipeline/pipelines/{pipeline_name}/executions/{execution_id}"
435
+
436
+
426
437
  def list_latest_images(ecr_client, ecr_repository_name, codebase_repository, echo):
427
438
  paginator = ecr_client.get_paginator("describe_images")
428
439
  describe_images_response_iterator = paginator.paginate(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: dbt-platform-helper
3
- Version: 12.6.0
3
+ Version: 13.0.0
4
4
  Summary: Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot.
5
5
  License: MIT
6
6
  Author: Department for Business and Trade Platform Team