dbt-platform-helper 11.0.1__py3-none-any.whl → 11.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-platform-helper might be problematic; see the release advisory for more details.

@@ -5,6 +5,7 @@ from shutil import rmtree
5
5
 
6
6
  import click
7
7
 
8
+ from dbt_platform_helper.constants import DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION
8
9
  from dbt_platform_helper.utils.application import get_application_name
9
10
  from dbt_platform_helper.utils.aws import get_account_details
10
11
  from dbt_platform_helper.utils.aws import get_codestar_connection_arn
@@ -24,6 +25,7 @@ from dbt_platform_helper.utils.versioning import (
24
25
 
25
26
  CODEBASE_PIPELINES_KEY = "codebase_pipelines"
26
27
  ENVIRONMENTS_KEY = "environments"
28
+ ENVIRONMENT_PIPELINES_KEY = "environment_pipelines"
27
29
 
28
30
 
29
31
  @click.group(chain=True, cls=ClickDocOptGroup)
@@ -33,18 +35,55 @@ def pipeline():
33
35
 
34
36
 
35
37
  @pipeline.command()
36
- def generate():
37
- """Given a platform-config.yml file, generate environment and service
38
- deployment pipelines."""
38
+ @click.option(
39
+ "--terraform-platform-modules-version",
40
+ help=f"""Override the default version of terraform-platform-modules with a specific version or branch.
41
+ Precedence of version used is version supplied via CLI, then the version found in
42
+ platform-config.yml/default_versions/terraform-platform-modules.
43
+ In absence of these inputs, defaults to version '{DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION}'.""",
44
+ )
45
+ @click.option(
46
+ "--deploy-branch",
47
+ help="""Specify the branch of <application>-deploy used to configure the source stage in the environment-pipeline resource.
48
+ This is generated from the terraform/environments-pipeline/<aws_account>/main.tf file.
49
+ (Default <application>-deploy branch is specified in
50
+ <application>-deploy/platform-config.yml/environment_pipelines/<environment-pipeline>/branch).""",
51
+ default=None,
52
+ )
53
+ def generate(terraform_platform_modules_version, deploy_branch):
54
+ """
55
+ Given a platform-config.yml file, generate environment and service
56
+ deployment pipelines.
57
+
58
+ This command does the following in relation to the environment pipelines:
59
+ - Reads contents of `platform-config.yml/environment-pipelines` configuration.
60
+ The `terraform/environment-pipelines/<aws_account>/main.tf` file is generated using this configuration.
61
+ The `main.tf` file is then used to generate Terraform for creating an environment pipeline resource.
62
+
63
+ This command does the following in relation to the codebase pipelines:
64
+ - Generates the copilot pipeline manifest.yml for copilot/pipelines/<codebase_pipeline_name>
65
+
66
+ (Deprecated) This command does the following for non terraform projects (legacy AWS Copilot):
67
+ - Generates the copilot manifest.yml for copilot/environments/<environment>
68
+ """
39
69
  pipeline_config = load_and_validate_platform_config()
40
70
 
41
- no_codebase_pipelines = CODEBASE_PIPELINES_KEY not in pipeline_config
42
- no_environment_pipelines = ENVIRONMENTS_KEY not in pipeline_config
71
+ has_codebase_pipelines = CODEBASE_PIPELINES_KEY in pipeline_config
72
+ has_legacy_environment_pipelines = ENVIRONMENTS_KEY in pipeline_config
73
+ has_environment_pipelines = ENVIRONMENT_PIPELINES_KEY in pipeline_config
43
74
 
44
- if no_codebase_pipelines and no_environment_pipelines:
75
+ if (
76
+ not has_codebase_pipelines
77
+ and not has_legacy_environment_pipelines
78
+ and not has_environment_pipelines
79
+ ):
45
80
  click.secho("No pipelines defined: nothing to do.", err=True, fg="yellow")
46
81
  return
47
82
 
83
+ platform_config_terraform_modules_default_version = pipeline_config.get(
84
+ "default_versions", {}
85
+ ).get("terraform-platform-modules", "")
86
+
48
87
  templates = setup_templates()
49
88
  app_name = get_application_name()
50
89
 
@@ -57,22 +96,34 @@ def generate():
57
96
  abort_with_error(f'There is no CodeStar Connection named "{app_name}" to use')
58
97
 
59
98
  base_path = Path(".")
60
- pipelines_dir = base_path / f"copilot/pipelines"
99
+ copilot_pipelines_dir = base_path / f"copilot/pipelines"
61
100
 
62
- _clean_pipeline_config(pipelines_dir)
101
+ _clean_pipeline_config(copilot_pipelines_dir)
63
102
 
64
- if not is_terraform_project() and ENVIRONMENTS_KEY in pipeline_config:
103
+ if is_terraform_project() and has_environment_pipelines:
104
+ environment_pipelines = pipeline_config[ENVIRONMENT_PIPELINES_KEY]
105
+
106
+ for config in environment_pipelines.values():
107
+ aws_account = config.get("account")
108
+ _generate_terraform_environment_pipeline_manifest(
109
+ pipeline_config["application"],
110
+ aws_account,
111
+ terraform_platform_modules_version,
112
+ platform_config_terraform_modules_default_version,
113
+ deploy_branch,
114
+ )
115
+ if not is_terraform_project() and has_legacy_environment_pipelines:
65
116
  _generate_copilot_environments_pipeline(
66
117
  app_name,
67
118
  codestar_connection_arn,
68
119
  git_repo,
69
120
  apply_environment_defaults(pipeline_config)[ENVIRONMENTS_KEY],
70
121
  base_path,
71
- pipelines_dir,
122
+ copilot_pipelines_dir,
72
123
  templates,
73
124
  )
74
125
 
75
- if CODEBASE_PIPELINES_KEY in pipeline_config:
126
+ if has_codebase_pipelines:
76
127
  account_id, _ = get_account_details()
77
128
 
78
129
  for codebase in pipeline_config[CODEBASE_PIPELINES_KEY]:
@@ -83,7 +134,7 @@ def generate():
83
134
  git_repo,
84
135
  codebase,
85
136
  base_path,
86
- pipelines_dir,
137
+ copilot_pipelines_dir,
87
138
  templates,
88
139
  )
89
140
 
@@ -170,3 +221,43 @@ def _create_file_from_template(
170
221
  ).render(template_data)
171
222
  message = mkfile(base_path, pipelines_dir / file_name, contents, overwrite=True)
172
223
  click.echo(message)
224
+
225
+
226
def _generate_terraform_environment_pipeline_manifest(
    application,
    aws_account,
    cli_terraform_platform_modules_version,
    platform_config_terraform_modules_default_version,
    deploy_branch,
):
    """Render and write terraform/environment-pipelines/<aws_account>/main.tf.

    The terraform-platform-modules version baked into the manifest is resolved
    from the CLI override, then the platform-config default, then the built-in
    default (see _determine_terraform_platform_modules_version).
    """
    resolved_version = _determine_terraform_platform_modules_version(
        cli_terraform_platform_modules_version,
        platform_config_terraform_modules_default_version,
    )

    template = setup_templates().get_template("environment-pipelines/main.tf")
    rendered = template.render(
        {
            "application": application,
            "aws_account": aws_account,
            "terraform_platform_modules_version": resolved_version,
            "deploy_branch": deploy_branch,
        }
    )

    output_dir = f"terraform/environment-pipelines/{aws_account}"
    makedirs(output_dir, exist_ok=True)

    # mkfile overwrites any previously generated manifest and returns a status message.
    click.echo(mkfile(".", f"{output_dir}/main.tf", rendered, overwrite=True))
252
+
253
+
254
def _determine_terraform_platform_modules_version(
    cli_terraform_platform_modules_version, platform_config_terraform_modules_default_version
):
    """Return the terraform-platform-modules version to use.

    Precedence: the version supplied on the CLI, then the default from
    platform-config.yml, then the package-wide default constant. The first
    truthy candidate wins.
    """
    candidates = (
        cli_terraform_platform_modules_version,
        platform_config_terraform_modules_default_version,
        DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION,
    )
    return [candidate for candidate in candidates if candidate][0]
File without changes
@@ -0,0 +1,220 @@
1
+ import re
2
+ from collections.abc import Callable
3
+ from pathlib import Path
4
+
5
+ import boto3
6
+ import click
7
+ from boto3 import Session
8
+
9
+ from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
10
+ from dbt_platform_helper.domain.maintenance_page import MaintenancePageProvider
11
+ from dbt_platform_helper.exceptions import AWSException
12
+ from dbt_platform_helper.utils.application import Application
13
+ from dbt_platform_helper.utils.application import ApplicationNotFoundError
14
+ from dbt_platform_helper.utils.application import load_application
15
+ from dbt_platform_helper.utils.aws import Vpc
16
+ from dbt_platform_helper.utils.aws import get_connection_string
17
+ from dbt_platform_helper.utils.aws import get_vpc_info_by_name
18
+ from dbt_platform_helper.utils.messages import abort_with_error
19
+ from dbt_platform_helper.utils.validation import load_and_validate_platform_config
20
+
21
+
22
class DatabaseCopy:
    """Dump, load, and copy an application's database between environments.

    Work is performed by pre-provisioned ECS tasks (task definitions named
    ``<app>-<env>-<database>-dump``/``-load``); progress is followed by live
    tailing the task's CloudWatch log group. Collaborators (application
    loading, VPC lookup, prompting, output, abort) are injected as callables
    so they can be substituted in tests.
    """

    def __init__(
        self,
        app: str,
        database: str,
        auto_approve: bool = False,
        load_application_fn: Callable[[str], Application] = load_application,
        vpc_config_fn: Callable[[Session, str, str, str], Vpc] = get_vpc_info_by_name,
        db_connection_string_fn: Callable[
            [Session, str, str, str, Callable], str
        ] = get_connection_string,
        maintenance_page_provider: Callable[
            [str, str, list[str], str, str], None
        ] = MaintenancePageProvider(),
        input_fn: Callable[[str], str] = click.prompt,
        echo_fn: Callable[[str], str] = click.secho,
        abort_fn: Callable[[str], None] = abort_with_error,
    ):
        # NOTE(review): MaintenancePageProvider() as a default argument is a single
        # shared instance across all DatabaseCopy objects — confirm it is stateless.
        self.app = app
        self.database = database
        self.auto_approve = auto_approve
        self.vpc_config_fn = vpc_config_fn
        self.db_connection_string_fn = db_connection_string_fn
        self.maintenance_page_provider = maintenance_page_provider
        self.input_fn = input_fn
        self.echo_fn = echo_fn
        self.abort_fn = abort_fn

        # No --app given: fall back to the application name in the deploy repo's
        # platform-config.yml, aborting if we are not inside a deploy repo.
        if not self.app:
            if not Path(PLATFORM_CONFIG_FILE).exists():
                self.abort_fn("You must either be in a deploy repo, or provide the --app option.")

            config = load_and_validate_platform_config(disable_aws_validation=True)
            self.app = config["application"]

        try:
            self.application = load_application_fn(self.app)
        except ApplicationNotFoundError:
            # NOTE(review): assumes abort_fn raises/exits; if it returns,
            # self.application is left unset — confirm abort_with_error behavior.
            abort_fn(f"No such application '{app}'.")

    def _execute_operation(self, is_dump: bool, env: str, vpc_name: str):
        """Run the dump or load ECS task for `env` and tail its logs until done.

        Aborts (via abort_fn) on unknown environment, VPC lookup failure,
        missing connection string, or task start failure.
        """
        vpc_name = self.enrich_vpc_name(env, vpc_name)

        environments = self.application.environments
        environment = environments.get(env)
        if not environment:
            self.abort_fn(
                f"No such environment '{env}'. Available environments are: {', '.join(environments.keys())}"
            )

        env_session = environment.session

        # NOTE(review): the code after each abort_fn call assumes abort_fn does not
        # return; otherwise vpc_config / db_connection_string would be unbound.
        try:
            vpc_config = self.vpc_config_fn(env_session, self.app, env, vpc_name)
        except AWSException as ex:
            self.abort_fn(str(ex))

        database_identifier = f"{self.app}-{env}-{self.database}"

        try:
            db_connection_string = self.db_connection_string_fn(
                env_session, self.app, env, database_identifier
            )
        except Exception as exc:
            self.abort_fn(f"{exc} (Database: {database_identifier})")

        try:
            task_arn = self.run_database_copy_task(
                env_session, env, vpc_config, is_dump, db_connection_string
            )
        except Exception as exc:
            self.abort_fn(f"{exc} (Account id: {self.account_id(env)})")

        if is_dump:
            message = f"Dumping {self.database} from the {env} environment into S3"
        else:
            message = f"Loading data into {self.database} in the {env} environment from S3"

        self.echo_fn(message, fg="white", bold=True)
        self.echo_fn(
            f"Task {task_arn} started. Waiting for it to complete (this may take some time)...",
            fg="white",
        )
        # Blocks until the task's log stream signals completion or abort.
        self.tail_logs(is_dump, env)

    def enrich_vpc_name(self, env, vpc_name):
        """Return vpc_name, defaulting to the environment's `vpc` setting in
        platform-config.yml when no name was supplied.

        Aborts if no name was supplied and we are not in a deploy repo.
        """
        if not vpc_name:
            if not Path(PLATFORM_CONFIG_FILE).exists():
                self.abort_fn(
                    "You must either be in a deploy repo, or provide the vpc name option."
                )
            config = load_and_validate_platform_config(disable_aws_validation=True)
            # May still be None if the environment has no `vpc` key — downstream
            # vpc_config_fn is expected to surface that as an error.
            vpc_name = config.get("environments", {}).get(env, {}).get("vpc")
        return vpc_name

    def run_database_copy_task(
        self,
        session: boto3.session.Session,
        env: str,
        vpc_config: Vpc,
        is_dump: bool,
        db_connection_string: str,
    ) -> str:
        """Start the Fargate dump/load task and return its task ARN.

        The task definition and container are both named
        ``<app>-<env>-<database>-<action>``; the connection string and the
        operation (DUMP/LOAD) are passed as container environment overrides.
        """
        client = session.client("ecs")
        action = "dump" if is_dump else "load"
        env_vars = [
            {"name": "DATA_COPY_OPERATION", "value": action.upper()},
            {"name": "DB_CONNECTION_STRING", "value": db_connection_string},
        ]
        # Load tasks additionally need the target cluster name.
        if not is_dump:
            env_vars.append({"name": "ECS_CLUSTER", "value": f"{self.app}-{env}"})

        # Region is hard-coded to eu-west-2 here (and in tail_logs).
        response = client.run_task(
            taskDefinition=f"arn:aws:ecs:eu-west-2:{self.account_id(env)}:task-definition/{self.app}-{env}-{self.database}-{action}",
            cluster=f"{self.app}-{env}",
            capacityProviderStrategy=[
                {"capacityProvider": "FARGATE", "weight": 1, "base": 0},
            ],
            networkConfiguration={
                "awsvpcConfiguration": {
                    "subnets": vpc_config.subnets,
                    "securityGroups": vpc_config.security_groups,
                    "assignPublicIp": "DISABLED",
                }
            },
            overrides={
                "containerOverrides": [
                    {
                        "name": f"{self.app}-{env}-{self.database}-{action}",
                        "environment": env_vars,
                    }
                ]
            },
        )

        # NOTE(review): if run_task returns an empty "tasks" list (e.g. capacity
        # failure) this raises IndexError rather than returning None — confirm intended.
        return response.get("tasks", [{}])[0].get("taskArn")

    def dump(self, env: str, vpc_name: str):
        """Dump the database in `env` to S3."""
        self._execute_operation(True, env, vpc_name)

    def load(self, env: str, vpc_name: str):
        """Load the database in `env` from S3, after user confirmation."""
        if self.is_confirmed_ready_to_load(env):
            self._execute_operation(False, env, vpc_name)

    def copy(
        self,
        from_env: str,
        to_env: str,
        from_vpc: str,
        to_vpc: str,
        services: tuple[str],
        template: str,
        no_maintenance_page: bool = False,
    ):
        """Dump from `from_env` then load into `to_env`, optionally wrapping the
        target environment in a maintenance page for the duration."""
        to_vpc = self.enrich_vpc_name(to_env, to_vpc)
        if not no_maintenance_page:
            self.maintenance_page_provider.activate(self.app, to_env, services, template, to_vpc)
        self.dump(from_env, from_vpc)
        self.load(to_env, to_vpc)
        # NOTE(review): if dump/load abort mid-way the maintenance page is not
        # deactivated — confirm whether that is the desired failure mode.
        if not no_maintenance_page:
            self.maintenance_page_provider.deactivate(self.app, to_env)

    def is_confirmed_ready_to_load(self, env: str) -> bool:
        """Return True if --auto-approve was set or the user confirms the
        destructive load with y/yes."""
        if self.auto_approve:
            return True

        user_input = self.input_fn(
            f"\nWARNING: the load operation is destructive and will delete the {self.database} database in the {env} environment. Continue? (y/n)"
        )
        return user_input.lower().strip() in ["y", "yes"]

    def tail_logs(self, is_dump: bool, env: str):
        """Live-tail the task's CloudWatch log group, echoing each message.

        Stops after a "Stopping data ..." message; aborts on "Aborting data ...".
        """
        action = "dump" if is_dump else "load"
        log_group_name = f"/ecs/{self.app}-{env}-{self.database}-{action}"
        log_group_arn = f"arn:aws:logs:eu-west-2:{self.account_id(env)}:log-group:{log_group_name}"
        self.echo_fn(f"Tailing {log_group_name} logs", fg="yellow")
        session = self.application.environments[env].session
        response = session.client("logs").start_live_tail(logGroupIdentifiers=[log_group_arn])

        stopped = False
        for data in response["responseStream"]:
            # Finish the batch containing the stop message, then exit on the
            # next stream event.
            if stopped:
                break
            results = data.get("sessionUpdate", {}).get("sessionResults", [])
            for result in results:
                message = result.get("message")

                if message:
                    match = re.match(r"(Stopping|Aborting) data (load|dump).*", message)
                    if match:
                        if match.group(1) == "Aborting":
                            self.abort_fn("Task aborted abnormally. See logs above for details.")
                        stopped = True
                    # The stopping/aborting message itself is still echoed.
                    self.echo_fn(message)

    def account_id(self, env):
        """Return the AWS account id for `env`, or None if env is unknown."""
        envs = self.application.environments
        if env in envs:
            return envs.get(env).account_id