dbt-platform-helper 12.2.3__py3-none-any.whl → 12.3.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release: this version of dbt-platform-helper might be problematic.
- dbt_platform_helper/commands/codebase.py +9 -80
- dbt_platform_helper/commands/conduit.py +25 -45
- dbt_platform_helper/commands/secrets.py +1 -1
- dbt_platform_helper/domain/codebase.py +60 -60
- dbt_platform_helper/domain/conduit.py +44 -89
- dbt_platform_helper/domain/database_copy.py +42 -37
- dbt_platform_helper/exceptions.py +87 -21
- dbt_platform_helper/providers/cloudformation.py +122 -100
- dbt_platform_helper/providers/copilot.py +32 -12
- dbt_platform_helper/providers/ecs.py +78 -70
- dbt_platform_helper/providers/secrets.py +74 -74
- dbt_platform_helper/utils/application.py +1 -1
- dbt_platform_helper/utils/aws.py +29 -6
- dbt_platform_helper/utils/validation.py +44 -16
- {dbt_platform_helper-12.2.3.dist-info → dbt_platform_helper-12.3.0.dist-info}/METADATA +1 -1
- {dbt_platform_helper-12.2.3.dist-info → dbt_platform_helper-12.3.0.dist-info}/RECORD +19 -19
- {dbt_platform_helper-12.2.3.dist-info → dbt_platform_helper-12.3.0.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-12.2.3.dist-info → dbt_platform_helper-12.3.0.dist-info}/WHEEL +0 -0
- {dbt_platform_helper-12.2.3.dist-info → dbt_platform_helper-12.3.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/domain/database_copy.py

@@ -15,6 +15,8 @@ from dbt_platform_helper.utils.application import load_application
 from dbt_platform_helper.utils.aws import Vpc
 from dbt_platform_helper.utils.aws import get_connection_string
 from dbt_platform_helper.utils.aws import get_vpc_info_by_name
+from dbt_platform_helper.utils.aws import wait_for_log_group_to_exist
+from dbt_platform_helper.utils.files import apply_environment_defaults
 from dbt_platform_helper.utils.messages import abort_with_error
 from dbt_platform_helper.utils.validation import load_and_validate_platform_config
@@ -25,80 +27,80 @@ class DatabaseCopy:
         app: str,
         database: str,
         auto_approve: bool = False,
-
-
-
+        load_application: Callable[[str], Application] = load_application,
+        vpc_config: Callable[[Session, str, str, str], Vpc] = get_vpc_info_by_name,
+        db_connection_string: Callable[
             [Session, str, str, str, Callable], str
         ] = get_connection_string,
         maintenance_page_provider: Callable[
             [str, str, list[str], str, str], None
         ] = MaintenancePageProvider(),
-
-
-
+        input: Callable[[str], str] = click.prompt,
+        echo: Callable[[str], str] = click.secho,
+        abort: Callable[[str], None] = abort_with_error,
     ):
         self.app = app
         self.database = database
         self.auto_approve = auto_approve
-        self.
-        self.
+        self.vpc_config = vpc_config
+        self.db_connection_string = db_connection_string
         self.maintenance_page_provider = maintenance_page_provider
-        self.
-        self.
-        self.
+        self.input = input
+        self.echo = echo
+        self.abort = abort

         if not self.app:
             if not Path(PLATFORM_CONFIG_FILE).exists():
-                self.
+                self.abort("You must either be in a deploy repo, or provide the --app option.")

             config = load_and_validate_platform_config()
             self.app = config["application"]

         try:
-            self.application =
+            self.application = load_application(self.app)
         except ApplicationNotFoundError:
-
+            abort(f"No such application '{app}'.")

-    def _execute_operation(self, is_dump: bool, env: str, vpc_name: str):
+    def _execute_operation(self, is_dump: bool, env: str, vpc_name: str, to_env: str):
         vpc_name = self.enrich_vpc_name(env, vpc_name)

         environments = self.application.environments
         environment = environments.get(env)
         if not environment:
-            self.
+            self.abort(
                 f"No such environment '{env}'. Available environments are: {', '.join(environments.keys())}"
             )

         env_session = environment.session

         try:
-            vpc_config = self.
+            vpc_config = self.vpc_config(env_session, self.app, env, vpc_name)
         except AWSException as ex:
-            self.
+            self.abort(str(ex))

         database_identifier = f"{self.app}-{env}-{self.database}"

         try:
-            db_connection_string = self.
+            db_connection_string = self.db_connection_string(
                 env_session, self.app, env, database_identifier
             )
         except Exception as exc:
-            self.
+            self.abort(f"{exc} (Database: {database_identifier})")

         try:
             task_arn = self.run_database_copy_task(
-                env_session, env, vpc_config, is_dump, db_connection_string
+                env_session, env, vpc_config, is_dump, db_connection_string, to_env
             )
         except Exception as exc:
-            self.
+            self.abort(f"{exc} (Account id: {self.account_id(env)})")

         if is_dump:
             message = f"Dumping {self.database} from the {env} environment into S3"
         else:
             message = f"Loading data into {self.database} in the {env} environment from S3"

-        self.
-        self.
+        self.echo(message, fg="white", bold=True)
+        self.echo(
             f"Task {task_arn} started. Waiting for it to complete (this may take some time)...",
             fg="white",
         )
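The constructor above now receives its collaborators (load_application, vpc_config, db_connection_string, input, echo and abort) as injectable callables with production defaults. A minimal sketch of how that injection could be used to exercise DatabaseCopy without touching AWS; every stub value below is hypothetical and not part of the package:

# Hypothetical test wiring: all stubs here are illustrative, not part of the package.
from unittest.mock import MagicMock

from dbt_platform_helper.domain.database_copy import DatabaseCopy

fake_application = MagicMock()  # stands in for a loaded Application object
copier = DatabaseCopy(
    app="demodjango",                                     # example application name
    database="demodjango-postgres",                       # example addon name
    auto_approve=True,
    load_application=lambda app_name: fake_application,   # skip the real AWS lookup
    vpc_config=MagicMock(),                               # returns a Vpc-like object
    db_connection_string=lambda *args: "postgresql://user:pass@host:5432/db",
    input=lambda prompt: "y",                             # auto-confirm the destructive-load prompt
    echo=lambda message, **kwargs: None,                  # silence console output
    abort=MagicMock(side_effect=SystemExit),              # make abort calls observable
)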
@@ -107,11 +109,10 @@ class DatabaseCopy:
     def enrich_vpc_name(self, env, vpc_name):
         if not vpc_name:
             if not Path(PLATFORM_CONFIG_FILE).exists():
-                self.
-                    "You must either be in a deploy repo, or provide the vpc name option."
-                )
+                self.abort("You must either be in a deploy repo, or provide the vpc name option.")
             config = load_and_validate_platform_config()
-
+            env_config = apply_environment_defaults(config)["environments"]
+            vpc_name = env_config.get(env, {}).get("vpc")
         return vpc_name

     def run_database_copy_task(
@@ -121,12 +122,14 @@ class DatabaseCopy:
         vpc_config: Vpc,
         is_dump: bool,
         db_connection_string: str,
+        to_env: str,
     ) -> str:
         client = session.client("ecs")
         action = "dump" if is_dump else "load"
         env_vars = [
             {"name": "DATA_COPY_OPERATION", "value": action.upper()},
             {"name": "DB_CONNECTION_STRING", "value": db_connection_string},
+            {"name": "TO_ENVIRONMENT", "value": to_env},
         ]
         if not is_dump:
             env_vars.append({"name": "ECS_CLUSTER", "value": f"{self.app}-{env}"})
@@ -156,12 +159,12 @@ class DatabaseCopy:

         return response.get("tasks", [{}])[0].get("taskArn")

-    def dump(self, env: str, vpc_name: str):
-        self._execute_operation(True, env, vpc_name)
+    def dump(self, env: str, vpc_name: str, to_env: str):
+        self._execute_operation(True, env, vpc_name, to_env)

     def load(self, env: str, vpc_name: str):
         if self.is_confirmed_ready_to_load(env):
-            self._execute_operation(False, env, vpc_name)
+            self._execute_operation(False, env, vpc_name, to_env=env)

     def copy(
         self,
@@ -176,7 +179,7 @@ class DatabaseCopy:
         to_vpc = self.enrich_vpc_name(to_env, to_vpc)
         if not no_maintenance_page:
             self.maintenance_page_provider.activate(self.app, to_env, services, template, to_vpc)
-        self.dump(from_env, from_vpc)
+        self.dump(from_env, from_vpc, to_env)
         self.load(to_env, to_vpc)
         if not no_maintenance_page:
             self.maintenance_page_provider.deactivate(self.app, to_env)
@@ -185,7 +188,7 @@ class DatabaseCopy:
         if self.auto_approve:
             return True

-        user_input = self.
+        user_input = self.input(
             f"\nWARNING: the load operation is destructive and will delete the {self.database} database in the {env} environment. Continue? (y/n)"
         )
         return user_input.lower().strip() in ["y", "yes"]
@@ -194,9 +197,11 @@ class DatabaseCopy:
         action = "dump" if is_dump else "load"
         log_group_name = f"/ecs/{self.app}-{env}-{self.database}-{action}"
         log_group_arn = f"arn:aws:logs:eu-west-2:{self.account_id(env)}:log-group:{log_group_name}"
-        self.
+        self.echo(f"Tailing {log_group_name} logs", fg="yellow")
         session = self.application.environments[env].session
-
+        log_client = session.client("logs")
+        wait_for_log_group_to_exist(log_client, log_group_name)
+        response = log_client.start_live_tail(logGroupIdentifiers=[log_group_arn])

         stopped = False
         for data in response["responseStream"]:
@@ -210,9 +215,9 @@ class DatabaseCopy:
             match = re.match(r"(Stopping|Aborting) data (load|dump).*", message)
             if match:
                 if match.group(1) == "Aborting":
-                    self.
+                    self.abort("Task aborted abnormally. See logs above for details.")
                 stopped = True
-            self.
+            self.echo(message)

     def account_id(self, env):
         envs = self.application.environments
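End to end, dump() now carries the target environment so the dump task can label its output via TO_ENVIRONMENT, load() passes its own environment, and copy() threads to_env from the source dump through to the load. A short usage sketch, assuming the signatures shown in the hunks above; application, environment and database names are illustrative, and a real run needs AWS credentials and a deployed application:

# Illustrative values throughout; this shows the new call shape, not a recipe.
from dbt_platform_helper.domain.database_copy import DatabaseCopy

copier = DatabaseCopy(app="demodjango", database="demodjango-postgres", auto_approve=True)

# Dump from the source environment, tagging the output for its destination.
# vpc_name=None falls back to the VPC from platform-config defaults (deploy repo required).
copier.dump("staging", vpc_name=None, to_env="dev")

# Load into the target environment; load() supplies its own environment as to_env.
copier.load("dev", vpc_name=None)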
dbt_platform_helper/exceptions.py

@@ -1,11 +1,38 @@
+import os
+
+from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES
+
+
 class ValidationException(Exception):
     pass


-class
+class PlatformException(Exception):
+    pass
+
+
+class AWSException(PlatformException):
+    pass
+
+
+class ApplicationException(PlatformException):
     pass


+class CloudFormationException(AWSException):
+    def __init__(self, stack_name: str, current_status: str):
+        super().__init__(
+            f"The CloudFormation stack '{stack_name}' is not in a good state: {current_status}"
+        )
+
+
+class CommitNotFoundError:
+    def __init__(self, commit: str):
+        super().__init__(
+            f"""The commit hash "{commit}" either does not exist or you need to run `git fetch`."""
+        )
+
+
 class IncompatibleMajorVersion(ValidationException):
     def __init__(self, app_version: str, check_version: str):
         super().__init__()
@@ -21,61 +48,100 @@ class IncompatibleMinorVersion(ValidationException):


 class NoClusterError(AWSException):
-
+    def __init__(self, application_name: str, environment: str):
+        super().__init__(
+            f"""No ECS cluster found for "{application_name}" in "{environment}" environment."""
+        )


 class CreateTaskTimeoutError(AWSException):
-
+    def __init__(self, addon_name: str, application_name: str, environment: str):
+        super().__init__(
+            f"""Client ({addon_name}) ECS task has failed to start for "{application_name}" in "{environment}" environment."""
+        )


 class ParameterNotFoundError(AWSException):
-
+    def __init__(self, application_name: str, environment: str):
+        super().__init__(
+            f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". Try deploying the "{application_name}" "{environment}" environment."""
+        )


 class AddonNotFoundError(AWSException):
-
+    def __init__(self, addon_name: str):
+        super().__init__(f"""Addon "{addon_name}" does not exist.""")


 class InvalidAddonTypeError(AWSException):
     def __init__(self, addon_type):
         self.addon_type = addon_type
+        super().__init__(
+            f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}."""
+        )


 class AddonTypeMissingFromConfigError(AWSException):
-
+    def __init__(self, addon_name: str):
+        super().__init__(
+            f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type."""
+        )


-class CopilotCodebaseNotFoundError(
-
+class CopilotCodebaseNotFoundError(PlatformException):
+    def __init__(self, codebase: str):
+        super().__init__(
+            f"""The codebase "{codebase}" either does not exist or has not been deployed."""
+        )


-class NotInCodeBaseRepositoryError(
-
+class NotInCodeBaseRepositoryError(PlatformException):
+    def __init__(self):
+        super().__init__(
+            "You are in the deploy repository; make sure you are in the application codebase repository.",
+        )


-class NoCopilotCodebasesFoundError(
-
+class NoCopilotCodebasesFoundError(PlatformException):
+    def __init__(self, application_name: str):
+        super().__init__(f"""No codebases found for application "{application_name}".""")


-class ImageNotFoundError(
-
+class ImageNotFoundError(PlatformException):
+    def __init__(self, commit: str):
+        super().__init__(
+            f"""The commit hash "{commit}" has not been built into an image, try the `platform-helper codebase build` command first."""
+        )


-class ApplicationDeploymentNotTriggered(
-
+class ApplicationDeploymentNotTriggered(PlatformException):
+    def __init__(self, codebase: str):
+        super().__init__(f"""Your deployment for {codebase} was not triggered.""")


-class ApplicationNotFoundError(
-
+class ApplicationNotFoundError(ApplicationException):
+    def __init__(self, application_name: str):
+        super().__init__(
+            f"""The account "{os.environ.get("AWS_PROFILE")}" does not contain the application "{application_name}"; ensure you have set the environment variable "AWS_PROFILE" correctly."""
+        )


-class ApplicationEnvironmentNotFoundError(
-
+class ApplicationEnvironmentNotFoundError(ApplicationException):
+    def __init__(self, environment: str):
+        super().__init__(
+            f"""The environment "{environment}" either does not exist or has not been deployed."""
+        )


 class SecretNotFoundError(AWSException):
-
+    def __init__(self, secret_name: str):
+        super().__init__(f"""No secret called "{secret_name}".""")


 class ECSAgentNotRunning(AWSException):
+    def __init__(self):
+        super().__init__("""ECS exec agent never reached "RUNNING" status""")
+
+
+class ResourceNotFoundException(AWSException):
     pass
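With PlatformException at the root and AWSException / ApplicationException beneath it, callers can now catch whole families of errors instead of listing every class, and each exception builds its own message. A small sketch of that, assuming the hierarchy shown above; run_some_command is hypothetical:

# Sketch: catching the new exception families. run_some_command is a made-up example.
from dbt_platform_helper.exceptions import (
    ApplicationNotFoundError,
    AWSException,
    PlatformException,
)

def run_some_command():
    raise ApplicationNotFoundError("demodjango")  # the exception builds its own message

try:
    run_some_command()
except AWSException as err:
    print(f"AWS error: {err}")        # NoClusterError, SecretNotFoundError, ...
except PlatformException as err:
    print(f"Platform error: {err}")   # ApplicationNotFoundError is caught here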
dbt_platform_helper/providers/cloudformation.py

@@ -1,105 +1,127 @@
 import json

+import botocore
 from cfn_tools import dump_yaml
 from cfn_tools import load_yaml

- (old lines 6-105 removed; their content is not shown in this diff view)
+from dbt_platform_helper.exceptions import CloudFormationException
+
+
+class CloudFormation:
+    def __init__(self, cloudformation_client, iam_client, ssm_client):
+        self.cloudformation_client = cloudformation_client
+        self.iam_client = iam_client
+        self.ssm_client = ssm_client
+
+    def add_stack_delete_policy_to_task_role(self, task_name: str):
+        stack_name = f"task-{task_name}"
+        stack_resources = self.cloudformation_client.list_stack_resources(StackName=stack_name)[
+            "StackResourceSummaries"
+        ]
+
+        for resource in stack_resources:
+            if resource["LogicalResourceId"] == "DefaultTaskRole":
+                task_role_name = resource["PhysicalResourceId"]
+                self.iam_client.put_role_policy(
+                    RoleName=task_role_name,
+                    PolicyName="DeleteCloudFormationStack",
+                    PolicyDocument=json.dumps(
+                        {
+                            "Version": "2012-10-17",
+                            "Statement": [
+                                {
+                                    "Action": ["cloudformation:DeleteStack"],
+                                    "Effect": "Allow",
+                                    "Resource": f"arn:aws:cloudformation:*:*:stack/{stack_name}/*",
+                                },
+                            ],
+                        },
+                    ),
+                )
+
+    def update_conduit_stack_resources(
+        self,
+        application_name: str,
+        env: str,
+        addon_type: str,
+        addon_name: str,
+        task_name: str,
+        parameter_name: str,
+        access: str,
+    ):
+        conduit_stack_name = f"task-{task_name}"
+        template = self.cloudformation_client.get_template(StackName=conduit_stack_name)
+        template_yml = load_yaml(template["TemplateBody"])
+
+        template_yml["Resources"]["LogGroup"]["DeletionPolicy"] = "Retain"
+
+        template_yml["Resources"]["TaskNameParameter"] = load_yaml(
+            f"""
+            Type: AWS::SSM::Parameter
+            Properties:
+              Name: {parameter_name}
+              Type: String
+              Value: {task_name}
+            """
+        )
+
+        log_filter_role_arn = self.iam_client.get_role(RoleName="CWLtoSubscriptionFilterRole")[
+            "Role"
+        ]["Arn"]
+
+        destination_log_group_arns = json.loads(
+            self.ssm_client.get_parameter(Name="/copilot/tools/central_log_groups")["Parameter"][
+                "Value"
+            ]
+        )
+
+        destination_arn = destination_log_group_arns["dev"]
+        if env.lower() in ("prod", "production"):
+            destination_arn = destination_log_group_arns["prod"]
+
+        template_yml["Resources"]["SubscriptionFilter"] = load_yaml(
+            f"""
+            Type: AWS::Logs::SubscriptionFilter
+            DeletionPolicy: Retain
+            Properties:
+              RoleArn: {log_filter_role_arn}
+              LogGroupName: /copilot/{task_name}
+              FilterName: /copilot/conduit/{application_name}/{env}/{addon_type}/{addon_name}/{task_name.rsplit("-", 1)[1]}/{access}
+              FilterPattern: ''
+              DestinationArn: {destination_arn}
+            """
+        )
+
+        params = []
+        # TODO Currently not covered by tests - see https://uktrade.atlassian.net/browse/DBTP-1582
+        if "Parameters" in template_yml:
+            for param in template_yml["Parameters"]:
+                params.append({"ParameterKey": param, "UsePreviousValue": True})
+
+        self.cloudformation_client.update_stack(
+            StackName=conduit_stack_name,
+            TemplateBody=dump_yaml(template_yml),
+            Parameters=params,
+            Capabilities=["CAPABILITY_IAM"],
+        )
+
+        return conduit_stack_name
+
+    def wait_for_cloudformation_to_reach_status(self, stack_status, stack_name):
+        waiter = self.cloudformation_client.get_waiter(stack_status)
+
+        try:
+            waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 5, "MaxAttempts": 20})
+        except botocore.exceptions.WaiterError as err:
+            current_status = err.last_response.get("Stacks", [{}])[0].get("StackStatus", "")
+
+            if current_status in [
+                "ROLLBACK_IN_PROGRESS",
+                "UPDATE_ROLLBACK_IN_PROGRESS",
+                "ROLLBACK_FAILED",
+            ]:
+                raise CloudFormationException(stack_name, current_status)
+            else:
+                raise CloudFormationException(
+                    stack_name, f"Error while waiting for stack status: {str(err)}"
+                )