dbt-platform-helper 10.11.3__py3-none-any.whl → 11.0.1__py3-none-any.whl

This diff shows the changes between publicly available package versions as published to their public registries, and is provided for informational purposes only.

@@ -40,7 +40,8 @@
  - [platform-helper notify environment-progress](#platform-helper-notify-environment-progress)
  - [platform-helper notify add-comment](#platform-helper-notify-add-comment)
  - [platform-helper database](#platform-helper-database)
- - [platform-helper database copy](#platform-helper-database-copy)
+ - [platform-helper database dump](#platform-helper-database-dump)
+ - [platform-helper database load](#platform-helper-database-load)
  - [platform-helper version](#platform-helper-version)
  - [platform-helper version get-platform-helper-for-project](#platform-helper-version-get-platform-helper-for-project)

@@ -964,7 +965,7 @@ platform-helper notify add-comment <slack_channel_id> <slack_token>
  ## Usage

  ```
- platform-helper database copy
+ platform-helper database (dump|load)
  ```

  ## Options
@@ -974,27 +975,64 @@ platform-helper database copy

  ## Commands

- - [`copy` ↪](#platform-helper-database-copy)
+ - [`dump` ↪](#platform-helper-database-dump)
+ - [`load` ↪](#platform-helper-database-load)

- # platform-helper database copy
+ # platform-helper database dump

  [↩ Parent](#platform-helper-database)

- Copy source database to target database.
+ Dump a database into an S3 bucket.

  ## Usage

  ```
- platform-helper database copy <source_db> <target_db>
+ platform-helper database dump --account-id <account_id> --app <application>
+                               --env <environment> --database <database>
+                               --vpc-name <vpc_name>
  ```

- ## Arguments
+ ## Options
+
+ - `--account-id <text>`
+
+ - `--app <text>`
+
+ - `--env <text>`
+
+ - `--database <text>`

- - `source_db <text>`
- - `target_db <text>`
+ - `--vpc-name <text>`
+
+ - `--help <boolean>` _Defaults to False._
+   - Show this message and exit.
+
+ # platform-helper database load
+
+ [↩ Parent](#platform-helper-database)
+
+ Load a database from an S3 bucket.
+
+ ## Usage
+
+ ```
+ platform-helper database load --account-id <account_id> --app <application>
+                               --env <environment> --database <database>
+                               --vpc-name <vpc_name>
+ ```

  ## Options

+ - `--account-id <text>`
+
+ - `--app <text>`
+
+ - `--env <text>`
+
+ - `--database <text>`
+
+ - `--vpc-name <text>`
+
  - `--help <boolean>` _Defaults to False._
    - Show this message and exit.

@@ -11,7 +11,9 @@ from cfn_tools import load_yaml

  from dbt_platform_helper.utils.application import Application
  from dbt_platform_helper.utils.application import load_application
- from dbt_platform_helper.utils.aws import update_postgres_parameter_with_master_secret
+ from dbt_platform_helper.utils.aws import (
+     get_postgres_connection_data_updated_with_master_secret,
+ )
  from dbt_platform_helper.utils.click import ClickDocOptCommand
  from dbt_platform_helper.utils.messages import abort_with_error
  from dbt_platform_helper.utils.platform_config import is_terraform_project
@@ -171,7 +173,7 @@ def create_postgres_admin_task(
          Name=master_secret_name, WithDecryption=True
      )["Parameter"]["Value"]
      connection_string = json.dumps(
-         update_postgres_parameter_with_master_secret(
+         get_postgres_connection_data_updated_with_master_secret(
              session, read_only_secret_name, master_secret_arn
          )
      )
@@ -416,7 +418,6 @@ def conduit(addon_name: str, app: str, env: str, access: str):

      try:
          addon_type = get_addon_type(application, env, addon_name)
-         start_conduit(application, env, addon_type, addon_name, access)
      except ParameterNotFoundConduitError:
          click.secho(
              f"""No parameter called "/copilot/applications/{app}/environments/{env}/addons". Try deploying the "{app}" "{env}" environment.""",
@@ -435,6 +436,9 @@ def conduit(addon_name: str, app: str, env: str, access: str):
              fg="red",
          )
          exit(1)
+
+     try:
+         start_conduit(application, env, addon_type, addon_name, access)
      except NoClusterConduitError:
          click.secho(f"""No ECS cluster found for "{app}" in "{env}" environment.""", fg="red")
          exit(1)
@@ -1,122 +1,33 @@
- import subprocess
- from typing import List
-
  import click

- from dbt_platform_helper.commands.conduit import add_stack_delete_policy_to_task_role
- from dbt_platform_helper.commands.conduit import addon_client_is_running
- from dbt_platform_helper.commands.conduit import connect_to_addon_client_task
- from dbt_platform_helper.commands.conduit import get_cluster_arn
- from dbt_platform_helper.commands.conduit import normalise_secret_name
- from dbt_platform_helper.utils.application import load_application
- from dbt_platform_helper.utils.aws import get_aws_session_or_abort
- from dbt_platform_helper.utils.aws import update_postgres_parameter_with_master_secret
+ from dbt_platform_helper.commands.database_helpers import DatabaseCopy
  from dbt_platform_helper.utils.click import ClickDocOptGroup
- from dbt_platform_helper.utils.versioning import (
-     check_platform_helper_version_needs_update,
- )


  @click.group(chain=True, cls=ClickDocOptGroup)
  def database():
-     check_platform_helper_version_needs_update()
-
-
- @database.command(name="copy")
- @click.argument("source_db", type=str, required=True)
- @click.argument("target_db", type=str, required=True)
- def copy(source_db: str, target_db: str):
-     """Copy source database to target database."""
-     app = None
-     source_env = None
-     target_env = None
-
-     for tag in get_database_tags(source_db):
-         if tag["Key"] == "copilot-application":
-             app = tag["Value"]
-         if tag["Key"] == "copilot-environment":
-             source_env = tag["Value"]
-         if app is not None:
-             break
-
-     for tag in get_database_tags(target_db):
-         if tag["Key"] == "copilot-environment":
-             target_env = tag["Value"]
-             break
-
-     if not app or not source_env or not target_env:
-         click.secho(f"""Required database tags not found.""", fg="red")
-         exit(1)
-
-     if target_env == "prod":
-         click.secho(f"""The target database cannot be a production database.""", fg="red")
-         exit(1)
-
-     if source_db == target_db:
-         click.secho(f"""Source and target databases are the same.""", fg="red")
-         exit(1)
-
-     if not click.confirm(
-         click.style("Copying data from ", fg="yellow")
-         + click.style(f"{source_db} ", fg="white", bold=True)
-         + click.style(f"in environment {source_env} to ", fg="yellow", bold=True)
-         + click.style(f"{target_db} ", fg="white", bold=True)
-         + click.style(f"in environment {target_env}\n", fg="yellow", bold=True)
-         + click.style("Do you want to continue?", fg="yellow"),
-     ):
-         exit()
-
-     click.echo(f"""Starting task to copy data from {source_db} to {target_db}""")
-
-     source_db_connection = get_connection_string(app, source_env, source_db)
-     target_db_connection = get_connection_string(app, target_env, target_db)
-
-     application = load_application(app)
-     cluster_arn = get_cluster_arn(application, source_env)
-     task_name = f"database-copy-{app}-{source_env}-{app}-{target_env}"
-
-     if not addon_client_is_running(application, source_env, cluster_arn, task_name):
-         subprocess.call(
-             f"copilot task run --app {app} --env {source_env} "
-             f"--task-group-name {task_name} "
-             f"--image public.ecr.aws/uktrade/tunnel:database-copy "
-             f"--env-vars SOURCE_DB_CONNECTION='{source_db_connection}',TARGET_DB_CONNECTION='{target_db_connection}' "
-             "--platform-os linux "
-             "--platform-arch arm64",
-             shell=True,
-         )
-     add_stack_delete_policy_to_task_role(application, source_env, task_name)
-     connect_to_addon_client_task(application, source_env, cluster_arn, task_name)
-
-
- def get_database_tags(db_identifier: str) -> List[dict]:
-     session = get_aws_session_or_abort()
-     rds = session.client("rds")
-
-     try:
-         db_instance = rds.describe_db_instances(DBInstanceIdentifier=db_identifier)["DBInstances"][
-             0
-         ]
-
-         return db_instance["TagList"]
-     except rds.exceptions.DBInstanceNotFoundFault:
-         click.secho(
-             f"""Database {db_identifier} not found. Check the database identifier.""", fg="red"
-         )
-         exit(1)
-
-
- def get_connection_string(app: str, env: str, db_identifier: str) -> str:
-     session = get_aws_session_or_abort()
-     addon_name = normalise_secret_name(db_identifier.split(f"{app}-{env}-", 1)[1])
-     connection_string_parameter = f"/copilot/{app}/{env}/secrets/{addon_name}_READ_ONLY_USER"
-     master_secret_name = f"/copilot/{app}/{env}/secrets/{addon_name}_RDS_MASTER_ARN"
-     master_secret_arn = session.client("ssm").get_parameter(
-         Name=master_secret_name, WithDecryption=True
-     )["Parameter"]["Value"]
-
-     conn = update_postgres_parameter_with_master_secret(
-         session, connection_string_parameter, master_secret_arn
-     )
-
-     return f"postgres://{conn['username']}:{conn['password']}@{conn['host']}:{conn['port']}/{conn['dbname']}"
+     pass
+
+
+ @database.command(name="dump")
+ @click.option("--account-id", type=str, required=True)
+ @click.option("--app", type=str, required=True)
+ @click.option("--env", type=str, required=True)
+ @click.option("--database", type=str, required=True)
+ @click.option("--vpc-name", type=str, required=True)
+ def dump(account_id, app, env, database, vpc_name):
+     """Dump a database into an S3 bucket."""
+     data_copy = DatabaseCopy(account_id, app, env, database, vpc_name)
+     data_copy.dump()
+
+
+ @database.command(name="load")
+ @click.option("--account-id", type=str, required=True)
+ @click.option("--app", type=str, required=True)
+ @click.option("--env", type=str, required=True)
+ @click.option("--database", type=str, required=True)
+ @click.option("--vpc-name", type=str, required=True)
+ def load(account_id, app, env, database, vpc_name):
+     """Load a database from an S3 bucket."""
+     data_copy = DatabaseCopy(account_id, app, env, database, vpc_name)
+     data_copy.load()
@@ -0,0 +1,145 @@
+ import boto3
+ import click
+
+ from dbt_platform_helper.utils.aws import Vpc
+ from dbt_platform_helper.utils.aws import get_aws_session_or_abort
+ from dbt_platform_helper.utils.aws import get_connection_string
+ from dbt_platform_helper.utils.aws import get_vpc_info_by_name
+
+
+ def run_database_copy_task(
+     session: boto3.session.Session,
+     account_id: str,
+     app: str,
+     env: str,
+     database: str,
+     vpc_config: Vpc,
+     is_dump: bool,
+     db_connection_string: str,
+ ):
+     client = session.client("ecs")
+     action = "dump" if is_dump else "load"
+     response = client.run_task(
+         taskDefinition=f"arn:aws:ecs:eu-west-2:{account_id}:task-definition/{app}-{env}-{database}-{action}",
+         cluster=f"{app}-{env}",
+         capacityProviderStrategy=[
+             {"capacityProvider": "FARGATE", "weight": 1, "base": 0},
+         ],
+         networkConfiguration={
+             "awsvpcConfiguration": {
+                 "subnets": vpc_config.subnets,
+                 "securityGroups": vpc_config.security_groups,
+                 "assignPublicIp": "DISABLED",
+             }
+         },
+         overrides={
+             "containerOverrides": [
+                 {
+                     "name": f"{app}-{env}-{database}-{action}",
+                     "environment": [
+                         {"name": "DATA_COPY_OPERATION", "value": action.upper()},
+                         {"name": "DB_CONNECTION_STRING", "value": db_connection_string},
+                     ],
+                 }
+             ]
+         },
+     )
+
+     return response.get("tasks", [{}])[0].get("taskArn")
+
+
+ class DatabaseCopy:
+     def __init__(
+         self,
+         account_id,
+         app,
+         env,
+         database,
+         vpc_name,
+         get_session_fn=get_aws_session_or_abort,
+         run_database_copy_fn=run_database_copy_task,
+         vpc_config_fn=get_vpc_info_by_name,
+         db_connection_string_fn=get_connection_string,
+         input_fn=click.prompt,
+         echo_fn=click.secho,
+     ):
+         self.account_id = account_id
+         self.app = app
+         self.env = env
+         self.database = database
+         self.vpc_name = vpc_name
+         self.get_session_fn = get_session_fn
+         self.run_database_copy_fn = run_database_copy_fn
+         self.vpc_config_fn = vpc_config_fn
+         self.db_connection_string_fn = db_connection_string_fn
+         self.input_fn = input_fn
+         self.echo_fn = echo_fn
+
+     def _execute_operation(self, is_dump):
+         session = self.get_session_fn()
+         vpc_config = self.vpc_config_fn(session, self.app, self.env, self.vpc_name)
+         database_identifier = f"{self.app}-{self.env}-{self.database}"
+         db_connection_string = self.db_connection_string_fn(
+             session, self.app, self.env, database_identifier
+         )
+         task_arn = self.run_database_copy_fn(
+             session,
+             self.account_id,
+             self.app,
+             self.env,
+             self.database,
+             vpc_config,
+             is_dump,
+             db_connection_string,
+         )
+
+         self.echo_fn(
+             f"Task {task_arn} started. Waiting for it to complete (this may take some time)...",
+             fg="green",
+         )
+         self.tail_logs(is_dump)
+         self.wait_for_task_to_stop(task_arn)
+
+     def dump(self):
+         self._execute_operation(True)
+
+     def load(self):
+         if self.is_confirmed_ready_to_load():
+             self._execute_operation(False)
+
+     def is_confirmed_ready_to_load(self):
+         user_input = self.input_fn(
+             f"Are all tasks using {self.database} in the {self.env} environment stopped? (y/n)"
+         )
+         return user_input.lower().strip() in ["y", "yes"]
+
+     def tail_logs(self, is_dump: bool):
+         action = "dump" if is_dump else "load"
+         log_group_name = f"/ecs/{self.app}-{self.env}-{self.database}-{action}"
+         log_group_arn = f"arn:aws:logs:eu-west-2:{self.account_id}:log-group:{log_group_name}"
+         self.echo_fn(f"Tailing logs for {log_group_name}", fg="yellow")
+         session = self.get_session_fn()
+         response = session.client("logs").start_live_tail(logGroupIdentifiers=[log_group_arn])
+
+         stopped = False
+         for data in response["responseStream"]:
+             if stopped:
+                 break
+             results = data.get("sessionUpdate", {}).get("sessionResults", [])
+             for result in results:
+                 message = result.get("message")
+
+                 if message:
+                     if message.startswith("Stopping data "):
+                         stopped = True
+                     self.echo_fn(message)
+
+     def wait_for_task_to_stop(self, task_arn):
+         self.echo_fn("Waiting for task to complete", fg="yellow")
+         client = self.get_session_fn().client("ecs")
+         waiter = client.get_waiter("tasks_stopped")
+         waiter.wait(
+             cluster=f"{self.app}-{self.env}",
+             tasks=[task_arn],
+             WaiterConfig={"Delay": 6, "MaxAttempts": 300},
+         )
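
The new `database dump` and `database load` commands are thin wrappers around the `DatabaseCopy` class added above. As a rough illustration only (not taken from the package documentation), the helper could also be driven directly from Python; the account, application, environment, database and VPC values below are placeholders:

```
# Hypothetical usage sketch of DatabaseCopy; all argument values are placeholders.
from dbt_platform_helper.commands.database_helpers import DatabaseCopy

data_copy = DatabaseCopy(
    "123456789012",    # AWS account id that owns the ECS task definitions
    "my-app",          # application name
    "dev",             # environment
    "my-database",     # postgres addon/database name
    "my-app-dev-vpc",  # VPC "Name" tag resolved by get_vpc_info_by_name
)

data_copy.dump()  # runs the <app>-<env>-<database>-dump ECS task and tails its logs
data_copy.load()  # asks for confirmation, then runs the matching -load task
```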
@@ -2,6 +2,10 @@ class ValidationException(Exception):
      pass


+ class AWSException(Exception):
+     pass
+
+
  class IncompatibleMajorVersion(ValidationException):
      def __init__(self, app_version: str, check_version: str):
          super().__init__()
@@ -3321,12 +3321,13 @@
      "dev": true
    },
    "node_modules/micromatch": {
-       "version": "4.0.5",
-       "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
-       "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+       "version": "4.0.8",
+       "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
+       "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
      "dev": true,
+       "license": "MIT",
      "dependencies": {
-         "braces": "^3.0.2",
+         "braces": "^3.0.3",
        "picomatch": "^2.3.1"
      },
      "engines": {
@@ -11,6 +11,7 @@ import click
  import yaml
  from boto3 import Session

+ from dbt_platform_helper.exceptions import AWSException
  from dbt_platform_helper.exceptions import ValidationException

  SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
@@ -321,7 +322,7 @@ def get_load_balancer_configuration(
      return response


- def update_postgres_parameter_with_master_secret(session, parameter_name, secret_arn):
+ def get_postgres_connection_data_updated_with_master_secret(session, parameter_name, secret_arn):
      ssm_client = session.client("ssm")
      secrets_manager_client = session.client("secretsmanager")
      response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True)
@@ -336,3 +337,74 @@ def update_postgres_parameter_with_master_secret(session, parameter_name, secret
      parameter_data["password"] = urllib.parse.quote(secret_value["password"])

      return parameter_data
+
+
+ def get_connection_string(
+     session: Session,
+     app: str,
+     env: str,
+     db_identifier: str,
+     connection_data_fn=get_postgres_connection_data_updated_with_master_secret,
+ ) -> str:
+     addon_name = db_identifier.split(f"{app}-{env}-", 1)[1]
+     normalised_addon_name = addon_name.replace("-", "_").upper()
+     connection_string_parameter = (
+         f"/copilot/{app}/{env}/secrets/{normalised_addon_name}_READ_ONLY_USER"
+     )
+     master_secret_name = f"/copilot/{app}/{env}/secrets/{normalised_addon_name}_RDS_MASTER_ARN"
+     master_secret_arn = session.client("ssm").get_parameter(
+         Name=master_secret_name, WithDecryption=True
+     )["Parameter"]["Value"]
+
+     conn = connection_data_fn(session, connection_string_parameter, master_secret_arn)
+
+     return f"postgres://{conn['username']}:{conn['password']}@{conn['host']}:{conn['port']}/{conn['dbname']}"
+
+
+ class Vpc:
+     def __init__(self, subnets, security_groups):
+         self.subnets = subnets
+         self.security_groups = security_groups
+
+
+ def get_vpc_info_by_name(session, app, env, vpc_name):
+     ec2_client = session.client("ec2")
+     vpc_response = ec2_client.describe_vpcs(Filters=[{"Name": "tag:Name", "Values": [vpc_name]}])
+
+     matching_vpcs = vpc_response.get("Vpcs", [])
+
+     if not matching_vpcs:
+         raise AWSException(f"VPC not found for name '{vpc_name}'")
+
+     vpc_id = vpc_response["Vpcs"][0].get("VpcId")
+
+     if not vpc_id:
+         raise AWSException(f"VPC id not present in vpc '{vpc_name}'")
+
+     ec2_resource = session.resource("ec2")
+     vpc = ec2_resource.Vpc(vpc_id)
+
+     route_tables = ec2_client.describe_route_tables(
+         Filters=[{"Name": "vpc-id", "Values": [vpc_id]}]
+     )["RouteTables"]
+
+     subnets = []
+     for route_table in route_tables:
+         private_routes = [route for route in route_table["Routes"] if "NatGatewayId" in route]
+         if not private_routes:
+             continue
+         for association in route_table["Associations"]:
+             if "SubnetId" in association:
+                 subnet_id = association["SubnetId"]
+                 subnets.append(subnet_id)
+
+     if not subnets:
+         raise AWSException(f"No private subnets found in vpc '{vpc_name}'")
+
+     tag_value = {"Key": "Name", "Value": f"copilot-{app}-{env}-env"}
+     sec_groups = [sg.id for sg in vpc.security_groups.all() if sg.tags and tag_value in sg.tags]
+
+     if not sec_groups:
+         raise AWSException(f"No matching security groups found in vpc '{vpc_name}'")
+
+     return Vpc(subnets, sec_groups)
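
`get_vpc_info_by_name` and `get_connection_string` are the two new `utils.aws` helpers that `DatabaseCopy` relies on. A minimal sketch of calling them directly, assuming placeholder application, environment, VPC and database identifiers:

```
# Hypothetical usage sketch; "my-app", "dev", "my-app-dev-vpc" and the database
# identifier below are placeholders.
from dbt_platform_helper.utils.aws import get_aws_session_or_abort
from dbt_platform_helper.utils.aws import get_connection_string
from dbt_platform_helper.utils.aws import get_vpc_info_by_name

session = get_aws_session_or_abort()

# Private subnets and the copilot-<app>-<env>-env security group for the named VPC.
vpc = get_vpc_info_by_name(session, "my-app", "dev", "my-app-dev-vpc")

# postgres:// connection string built from the addon's READ_ONLY_USER parameter,
# with the password replaced by the RDS master secret's value.
conn = get_connection_string(session, "my-app", "dev", "my-app-dev-my-database")
```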
@@ -222,6 +222,9 @@ RETENTION_POLICY = Or(
          Or("days", "years", only_one=True): int,
      },
  )
+
+ DATABASE_COPY = {"from": ENV_NAME, "to": ENV_NAME}
+
  POSTGRES_DEFINITION = {
      "type": "postgres",
      "version": NUMBER,
@@ -239,6 +242,7 @@ POSTGRES_DEFINITION = {
              Optional("backup_retention_days"): int_between(1, 35),
          }
      },
+     Optional("database_copy"): [DATABASE_COPY],
      Optional("objects"): [
          {
              "key": str,
@@ -387,10 +391,27 @@ ALB_DEFINITION = {
      Optional("environments"): {
          ENV_NAME: Or(
              {
+                 Optional("additional_address_list"): list,
+                 Optional("allowed_methods"): list,
+                 Optional("cached_methods"): list,
+                 Optional("cdn_compress"): bool,
+                 Optional("cdn_domains_list"): dict,
+                 Optional("cdn_geo_locations"): list,
+                 Optional("cdn_geo_restriction_type"): str,
+                 Optional("cdn_logging_bucket"): str,
+                 Optional("cdn_logging_bucket_prefix"): str,
+                 Optional("default_waf"): str,
                  Optional("domain_prefix"): str,
+                 Optional("enable_logging"): bool,
                  Optional("env_root"): str,
-                 Optional("cdn_domains_list"): dict,
-                 Optional("additional_address_list"): list,
+                 Optional("forwarded_values_forward"): str,
+                 Optional("forwarded_values_headers"): list,
+                 Optional("forwarded_values_query_string"): bool,
+                 Optional("origin_protocol_policy"): str,
+                 Optional("origin_ssl_protocols"): list,
+                 Optional("viewer_certificate_minimum_protocol_version"): str,
+                 Optional("viewer_certificate_ssl_support_method"): str,
+                 Optional("viewer_protocol_policy"): str,
              },
              None,
          )
@@ -531,10 +552,62 @@ def validate_platform_config(config, disable_aws_validation=False):
      _validate_environment_pipelines(enriched_config)
      _validate_environment_pipelines_triggers(enriched_config)
      _validate_codebase_pipelines(enriched_config)
+     validate_database_copy_section(enriched_config)
      if not disable_aws_validation:
          _validate_s3_bucket_uniqueness(enriched_config)


+ def validate_database_copy_section(config):
+     extensions = config.get("extensions", {})
+     if not extensions:
+         return
+
+     postgres_extensions = {
+         key: ext for key, ext in extensions.items() if ext.get("type", None) == "postgres"
+     }
+
+     if not postgres_extensions:
+         return
+
+     errors = []
+
+     for extension_name, extension in postgres_extensions.items():
+         database_copy_sections = extension.get("database_copy", [])
+
+         if not database_copy_sections:
+             return
+
+         all_environments = [env for env in config.get("environments", {}).keys() if not env == "*"]
+         all_envs_string = ", ".join(all_environments)
+
+         for section in database_copy_sections:
+             from_env = section["from"]
+             to_env = section["to"]
+
+             if from_env == to_env:
+                 errors.append(
+                     f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."
+                 )
+
+             if "prod" in to_env:
+                 errors.append(
+                     f"Copying to a prod environment is not supported: database_copy 'to' cannot be '{to_env}' in extension '{extension_name}'."
+                 )
+
+             if from_env not in all_environments:
+                 errors.append(
+                     f"database_copy 'from' parameter must be a valid environment ({all_envs_string}) but was '{from_env}' in extension '{extension_name}'."
+                 )
+
+             if to_env not in all_environments:
+                 errors.append(
+                     f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
+                 )
+
+     if errors:
+         abort_with_error("\n".join(errors))
+
+
  def _validate_environment_pipelines(config):
      bad_pipelines = {}
      for pipeline_name, pipeline in config.get("environment_pipelines", {}).items():
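
`validate_database_copy_section` backs the new `database_copy` key on postgres extensions in the platform config. As a hedged illustration (extension and environment names are placeholders), a config fragment that passes validation, expressed as the Python dict the validator receives:

```
# Hypothetical config fragment; extension and environment names are placeholders.
config = {
    "environments": {"*": {}, "dev": {}, "staging": {}, "prod": {}},
    "extensions": {
        "my-postgres": {
            "type": "postgres",
            "database_copy": [
                # Copying out of prod into a non-prod environment is accepted;
                # a 'to' environment containing "prod" would be rejected.
                {"from": "prod", "to": "staging"},
            ],
        },
    },
}

validate_database_copy_section(config)  # raises no errors for the fragment above
```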
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbt-platform-helper
- Version: 10.11.3
+ Version: 11.0.1
  Summary: Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot.
  License: MIT
  Author: Department for Business and Trade Platform Team
@@ -12,7 +12,7 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: Jinja2 (>=3.1.3,<4.0.0)
+ Requires-Dist: Jinja2 (==3.1.4)
  Requires-Dist: PyYAML (==6.0.1)
  Requires-Dist: aiohttp (>=3.8.4,<4.0.0)
  Requires-Dist: boto3 (>=1.28.24,<2.0.0)
@@ -1,4 +1,4 @@
- dbt_platform_helper/COMMANDS.md,sha256=BZdxv8hNF9PZ2fRNozyGkeddbsuVPF2TTlpE1Y4Z2vk,23001
+ dbt_platform_helper/COMMANDS.md,sha256=u9g0Zf1RYk-pzE4hjSKg15EMpXglzSA9qEg3QWv559M,23843
  dbt_platform_helper/README.md,sha256=B0qN2_u_ASqqgkGDWY2iwNGZt_9tUgMb9XqtaTuzYjw,1530
  dbt_platform_helper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt_platform_helper/addon-plans.yml,sha256=O46a_ODsGG9KXmQY_1XbSGqrpSaHSLDe-SdROzHx8Go,4545
@@ -7,10 +7,11 @@ dbt_platform_helper/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
  dbt_platform_helper/commands/application.py,sha256=1XTBfdZBDj1vOFkAWjTxxTNFtko4YB9Q0_tVdxavwAo,9957
  dbt_platform_helper/commands/check_cloudformation.py,sha256=aLif3yMHKuZO0uvdUjTH9FdRZNIpCqtWvE3aFQMozoc,3226
  dbt_platform_helper/commands/codebase.py,sha256=NchJzH-yxv5mXCe2rPyXVNHmXGEvjFUv0KhMKYsLNNQ,11380
- dbt_platform_helper/commands/conduit.py,sha256=SjuVhTcmyokCIF7OI8E5YxRgkyHc0C5lt72waCJfML0,15395
+ dbt_platform_helper/commands/conduit.py,sha256=BC2cxIX14e7_BEdvuMiq8z85k_4OWL-RwX0QJFJrUak,15436
  dbt_platform_helper/commands/config.py,sha256=NOHea7OAjrl6XHlW6HMLn0m0T5lFPyNH3HXoyCOWsJk,12070
  dbt_platform_helper/commands/copilot.py,sha256=euid0FTlVtwKmBQ6vxt_HxtBdRYiVQvb-9CyrK1-MWc,16724
- dbt_platform_helper/commands/database.py,sha256=-DacXZ2LhwV3CRukG35urEU2TuNVZHppUA3EhbBNjUs,4840
+ dbt_platform_helper/commands/database.py,sha256=r7GXfyt_1Wj7KlGilnPopfHXuJ8Q7x35RVh9XKiAZUY,1178
+ dbt_platform_helper/commands/database_helpers.py,sha256=7TE6ojEmrIcJ-XQf3MPKeRSuTO5GRBwzm8IoWYmjE3M,4967
  dbt_platform_helper/commands/dns.py,sha256=o7PkvHktZo0jmqbx0krJTL0R4GtWSf1rF2KDEWor8Ts,35211
  dbt_platform_helper/commands/environment.py,sha256=VNr7G1QstM8INGs8jOxL1jQRqDcWx2Q0jaaBXtbHhys,24819
  dbt_platform_helper/commands/generate.py,sha256=YLCPb-xcPapGcsLn-7d1Am7BpGp5l0iecIDTOdNGjHk,722
@@ -22,7 +23,7 @@ dbt_platform_helper/constants.py,sha256=fzN2VZt81mspNfdYpNef5_eEjDVsh8GUYmhBMTIf
  dbt_platform_helper/custom_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt_platform_helper/custom_resources/s3_object.py,sha256=0mhLuKD0-vwuN1qmnLCrLo2qL58FvtCjNNjH34kac6Y,2526
  dbt_platform_helper/default-extensions.yml,sha256=SU1ZitskbuEBpvE7efc3s56eAUF11j70brhj_XrNMMo,493
- dbt_platform_helper/exceptions.py,sha256=BeqhBQHXS57ROwEEGCnd9qcMDbuKvsdeiYRUuTlhe4w,499
+ dbt_platform_helper/exceptions.py,sha256=dCQkqmlt5yycZgblwTtpkwGrP7ANQB_1MRORUWIYI-U,541
  dbt_platform_helper/jinja2_tags.py,sha256=jFyN_Sxmko1GSfvrqRIGQ80CCW8EwlCV3su0ahJPfoE,541
  dbt_platform_helper/templates/.copilot/config.yml,sha256=J_bA9sCtBdCPBRImpCBRnYvhQd4vpLYIXIU-lq9vbkA,158
  dbt_platform_helper/templates/.copilot/image_build_run.sh,sha256=adYucYXEB-kAgZNjTQo0T6EIAY8sh_xCEvVhWKKQ8mw,164
@@ -66,7 +67,7 @@ dbt_platform_helper/templates/pipelines/codebase/overrides/bin/override.ts,sha25
  dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.deploy.yml,sha256=neXXpwjCrNRPTOxec3m8nRIFZ0bI4zq2WaPHf5eSU_Y,1090
  dbt_platform_helper/templates/pipelines/codebase/overrides/buildspec.image.yml,sha256=oHtRzH27IXJRyORWp7zvtjln-kTf3FgTdc9W_pBFBfU,1480
  dbt_platform_helper/templates/pipelines/codebase/overrides/cdk.json,sha256=ZbvoQdcj_k9k1GAD9qHUQcDfQPbMcBPjJwt2mu_S6ho,339
- dbt_platform_helper/templates/pipelines/codebase/overrides/package-lock.json,sha256=Is83o58QXbeg2SkHmR79ATt91aFhVbO7kb1VF0qXpY8,152671
+ dbt_platform_helper/templates/pipelines/codebase/overrides/package-lock.json,sha256=olH0o2L_csz-05gsjZ-GMKzNZqrkxciaJFUiAt7sYKc,152695
  dbt_platform_helper/templates/pipelines/codebase/overrides/package.json,sha256=XB0Pf63NSsGyowkPGTl1Nki167nRDXJdnxLSN3S_lQg,536
  dbt_platform_helper/templates/pipelines/codebase/overrides/stack.ts,sha256=v9m6EziRgFnrhF7inbr1KtuOh75FeC054vaWMoAi-qg,21500
  dbt_platform_helper/templates/pipelines/codebase/overrides/tsconfig.json,sha256=k6KabP-WwhFNgA1AFHNuonTEAnES6eR74jUuYUJEGOM,651
@@ -83,7 +84,7 @@ dbt_platform_helper/templates/svc/overrides/cfn.patches.yml,sha256=W7-d017akuUq9
  dbt_platform_helper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt_platform_helper/utils/application.py,sha256=FzEoyTUlRJ7bt01cZAg-JHRQM4CqosVLOojdfwaDl6w,4113
  dbt_platform_helper/utils/arn_parser.py,sha256=1jY0elpAe4YL3ulrrCf1YiKmjI-7YXz4gJASqkIFHTc,1294
- dbt_platform_helper/utils/aws.py,sha256=KCH73l5OvfXHE6gHEI6FsKTOI6Wv4nkMfgVJ8GWPqy4,11105
+ dbt_platform_helper/utils/aws.py,sha256=H4oFnzJf0WXQ8xi19QlC3zKKA3HWPp5wNfm9hZGgMsI,13683
  dbt_platform_helper/utils/click.py,sha256=Fx4y4bbve1zypvog_sgK7tJtCocmzheoEFLBRv1lfdM,2943
  dbt_platform_helper/utils/cloudformation.py,sha256=A1z12IYpapCOngAvZUCc_Ig7S7tuSP3ceeVzVR-5BGY,1053
  dbt_platform_helper/utils/cloudfoundry.py,sha256=GnQ4fVLnDfOdNSrsJjI6ElZHqpgwINeoPn77cUH2UFY,484
@@ -93,11 +94,11 @@ dbt_platform_helper/utils/manifests.py,sha256=ji3UYHCxq9tTpkm4MlRa2y0-JOYYqq1pWZ
  dbt_platform_helper/utils/messages.py,sha256=aLx6s9utt__IqlDdeIYq4n82ERwludu2Zfqy0Q2t-x8,115
  dbt_platform_helper/utils/platform_config.py,sha256=zJYCIsgUk5kNerocTzD7Q2XIrkYhiCu65sx8KQ-d-o8,833
  dbt_platform_helper/utils/template.py,sha256=raRx4QUCVJtKfvJK08Egg6gwWcs3r3V4nPWcJW4xNhA,574
- dbt_platform_helper/utils/validation.py,sha256=ZQMKPleuBsUr4XZ-fl_9AA_-NVHzH1Ol7YD-0vzKFhc,23801
+ dbt_platform_helper/utils/validation.py,sha256=fsUsEjabAtoswdEzyRBdVUdgRqYJeoE3PgNU-6kTb3k,26713
  dbt_platform_helper/utils/versioning.py,sha256=IBxdocJ8ZyJib38d1ja87tTuFE0iJ4npaDcAHQAKQ58,10825
  platform_helper.py,sha256=1lvPwynKODyi2U-ePKzJyFwRdKPs6_6zAYUPDYzDKMo,2300
- dbt_platform_helper-10.11.3.dist-info/LICENSE,sha256=dP79lN73--7LMApnankTGLqDbImXg8iYFqWgnExGkGk,1090
- dbt_platform_helper-10.11.3.dist-info/METADATA,sha256=gOy4d4Ms2qrec-Pb_YOn0hU7KTmHRftUqma2mo2isTI,3127
- dbt_platform_helper-10.11.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- dbt_platform_helper-10.11.3.dist-info/entry_points.txt,sha256=QhbY8F434A-onsg0-FsdMd2U6HKh6Q7yCFFZrGUh5-M,67
- dbt_platform_helper-10.11.3.dist-info/RECORD,,
+ dbt_platform_helper-11.0.1.dist-info/LICENSE,sha256=dP79lN73--7LMApnankTGLqDbImXg8iYFqWgnExGkGk,1090
+ dbt_platform_helper-11.0.1.dist-info/METADATA,sha256=h7DzlcU6VkjDqM8j8yG_WvBIUkUkzzebIS2t_WU1jXk,3119
+ dbt_platform_helper-11.0.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ dbt_platform_helper-11.0.1.dist-info/entry_points.txt,sha256=QhbY8F434A-onsg0-FsdMd2U6HKh6Q7yCFFZrGUh5-M,67
+ dbt_platform_helper-11.0.1.dist-info/RECORD,,