dbt-platform-helper 13.1.0__py3-none-any.whl → 15.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt_platform_helper/COMMANDS.md +107 -27
- dbt_platform_helper/commands/application.py +5 -6
- dbt_platform_helper/commands/codebase.py +31 -10
- dbt_platform_helper/commands/conduit.py +3 -5
- dbt_platform_helper/commands/config.py +20 -311
- dbt_platform_helper/commands/copilot.py +18 -391
- dbt_platform_helper/commands/database.py +17 -9
- dbt_platform_helper/commands/environment.py +20 -14
- dbt_platform_helper/commands/generate.py +0 -3
- dbt_platform_helper/commands/internal.py +140 -0
- dbt_platform_helper/commands/notify.py +58 -78
- dbt_platform_helper/commands/pipeline.py +23 -19
- dbt_platform_helper/commands/secrets.py +39 -93
- dbt_platform_helper/commands/version.py +7 -12
- dbt_platform_helper/constants.py +52 -7
- dbt_platform_helper/domain/codebase.py +89 -39
- dbt_platform_helper/domain/conduit.py +335 -76
- dbt_platform_helper/domain/config.py +381 -0
- dbt_platform_helper/domain/copilot.py +398 -0
- dbt_platform_helper/domain/copilot_environment.py +8 -8
- dbt_platform_helper/domain/database_copy.py +2 -2
- dbt_platform_helper/domain/maintenance_page.py +254 -430
- dbt_platform_helper/domain/notify.py +64 -0
- dbt_platform_helper/domain/pipelines.py +43 -35
- dbt_platform_helper/domain/plans.py +41 -0
- dbt_platform_helper/domain/secrets.py +279 -0
- dbt_platform_helper/domain/service.py +570 -0
- dbt_platform_helper/domain/terraform_environment.py +14 -13
- dbt_platform_helper/domain/update_alb_rules.py +412 -0
- dbt_platform_helper/domain/versioning.py +249 -0
- dbt_platform_helper/{providers → entities}/platform_config_schema.py +75 -82
- dbt_platform_helper/entities/semantic_version.py +83 -0
- dbt_platform_helper/entities/service.py +339 -0
- dbt_platform_helper/platform_exception.py +4 -0
- dbt_platform_helper/providers/autoscaling.py +24 -0
- dbt_platform_helper/providers/aws/__init__.py +0 -0
- dbt_platform_helper/providers/aws/exceptions.py +70 -0
- dbt_platform_helper/providers/aws/interfaces.py +13 -0
- dbt_platform_helper/providers/aws/opensearch.py +23 -0
- dbt_platform_helper/providers/aws/redis.py +21 -0
- dbt_platform_helper/providers/aws/sso_auth.py +75 -0
- dbt_platform_helper/providers/cache.py +40 -4
- dbt_platform_helper/providers/cloudformation.py +1 -1
- dbt_platform_helper/providers/config.py +137 -19
- dbt_platform_helper/providers/config_validator.py +112 -51
- dbt_platform_helper/providers/copilot.py +24 -16
- dbt_platform_helper/providers/ecr.py +89 -7
- dbt_platform_helper/providers/ecs.py +228 -36
- dbt_platform_helper/providers/environment_variable.py +24 -0
- dbt_platform_helper/providers/files.py +1 -1
- dbt_platform_helper/providers/io.py +36 -4
- dbt_platform_helper/providers/kms.py +22 -0
- dbt_platform_helper/providers/load_balancers.py +402 -42
- dbt_platform_helper/providers/logs.py +72 -0
- dbt_platform_helper/providers/parameter_store.py +134 -0
- dbt_platform_helper/providers/s3.py +21 -0
- dbt_platform_helper/providers/schema_migrations/__init__.py +0 -0
- dbt_platform_helper/providers/schema_migrations/schema_v0_to_v1_migration.py +43 -0
- dbt_platform_helper/providers/schema_migrator.py +77 -0
- dbt_platform_helper/providers/secrets.py +5 -5
- dbt_platform_helper/providers/slack_channel_notifier.py +62 -0
- dbt_platform_helper/providers/terraform_manifest.py +121 -19
- dbt_platform_helper/providers/version.py +106 -23
- dbt_platform_helper/providers/version_status.py +27 -0
- dbt_platform_helper/providers/vpc.py +36 -5
- dbt_platform_helper/providers/yaml_file.py +58 -2
- dbt_platform_helper/templates/environment-pipelines/main.tf +4 -3
- dbt_platform_helper/templates/svc/overrides/cfn.patches.yml +5 -0
- dbt_platform_helper/utilities/decorators.py +103 -0
- dbt_platform_helper/utils/application.py +119 -22
- dbt_platform_helper/utils/aws.py +39 -150
- dbt_platform_helper/utils/deep_merge.py +10 -0
- dbt_platform_helper/utils/git.py +1 -14
- dbt_platform_helper/utils/validation.py +1 -1
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/METADATA +11 -20
- dbt_platform_helper-15.16.0.dist-info/RECORD +118 -0
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/WHEEL +1 -1
- platform_helper.py +3 -1
- terraform/elasticache-redis/plans.yml +85 -0
- terraform/opensearch/plans.yml +71 -0
- terraform/postgres/plans.yml +128 -0
- dbt_platform_helper/addon-plans.yml +0 -224
- dbt_platform_helper/providers/aws.py +0 -37
- dbt_platform_helper/providers/opensearch.py +0 -36
- dbt_platform_helper/providers/redis.py +0 -34
- dbt_platform_helper/providers/semantic_version.py +0 -126
- dbt_platform_helper/templates/svc/manifest-backend.yml +0 -69
- dbt_platform_helper/templates/svc/manifest-public.yml +0 -109
- dbt_platform_helper/utils/cloudfoundry.py +0 -14
- dbt_platform_helper/utils/files.py +0 -53
- dbt_platform_helper/utils/manifests.py +0 -18
- dbt_platform_helper/utils/versioning.py +0 -238
- dbt_platform_helper-13.1.0.dist-info/RECORD +0 -96
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/entry_points.txt +0 -0
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info/licenses}/LICENSE +0 -0
dbt_platform_helper/domain/codebase.py:

```diff
@@ -3,35 +3,43 @@ import stat
 import subprocess
 from collections.abc import Callable
 from pathlib import Path
+from typing import Tuple
 
 import requests
 import yaml
 from boto3 import Session
 
 from dbt_platform_helper.platform_exception import PlatformException
+from dbt_platform_helper.providers.ecr import ECRProvider
 from dbt_platform_helper.providers.files import FileProvider
 from dbt_platform_helper.providers.io import ClickIOProvider
+from dbt_platform_helper.providers.parameter_store import ParameterStore
 from dbt_platform_helper.utils.application import Application
-from dbt_platform_helper.utils.application import
+from dbt_platform_helper.utils.application import (
+    ApplicationEnvironmentNotFoundException,
+)
 from dbt_platform_helper.utils.application import load_application
-from dbt_platform_helper.utils.aws import check_image_exists
 from dbt_platform_helper.utils.aws import get_aws_session_or_abort
 from dbt_platform_helper.utils.aws import get_build_url_from_arn
 from dbt_platform_helper.utils.aws import get_build_url_from_pipeline_execution_id
+from dbt_platform_helper.utils.aws import get_image_build_project
+from dbt_platform_helper.utils.aws import get_manual_release_pipeline
 from dbt_platform_helper.utils.aws import list_latest_images
 from dbt_platform_helper.utils.aws import start_build_extraction
 from dbt_platform_helper.utils.aws import start_pipeline_and_return_execution_id
-from dbt_platform_helper.utils.git import check_if_commit_exists
 from dbt_platform_helper.utils.template import setup_templates
 
 
 class Codebase:
     def __init__(
         self,
+        parameter_provider: ParameterStore,
         io: ClickIOProvider = ClickIOProvider(),
         load_application: Callable[[str], Application] = load_application,
         get_aws_session_or_abort: Callable[[str], Session] = get_aws_session_or_abort,
-
+        ecr_provider: ECRProvider = ECRProvider(),
+        get_image_build_project: Callable[[str], str] = get_image_build_project,
+        get_manual_release_pipeline: Callable[[str], str] = get_manual_release_pipeline,
         get_build_url_from_arn: Callable[[str], str] = get_build_url_from_arn,
         get_build_url_from_pipeline_execution_id: Callable[
             [str], str
@@ -41,19 +49,20 @@ class Codebase:
         start_pipeline_and_return_execution_id: Callable[
             [str], str
         ] = start_pipeline_and_return_execution_id,
-        check_if_commit_exists: Callable[[str], str] = check_if_commit_exists,
         run_subprocess: Callable[[str], str] = subprocess.run,
     ):
+        self.parameter_provider = parameter_provider
         self.io = io
         self.load_application = load_application
         self.get_aws_session_or_abort = get_aws_session_or_abort
-        self.
+        self.ecr_provider = ecr_provider
+        self.get_image_build_project = get_image_build_project
+        self.get_manual_release_pipeline = get_manual_release_pipeline
         self.get_build_url_from_arn = get_build_url_from_arn
         self.get_build_url_from_pipeline_execution_id = get_build_url_from_pipeline_execution_id
         self.list_latest_images = list_latest_images
         self.start_build_extraction = start_build_extraction
         self.start_pipeline_and_return_execution_id = start_pipeline_and_return_execution_id
-        self.check_if_commit_exists = check_if_commit_exists
         self.run_subprocess = run_subprocess
 
     def prepare(self):
@@ -121,15 +130,14 @@
         session = self.get_aws_session_or_abort()
         self.load_application(app, default_session=session)
 
-        self.check_if_commit_exists(commit)
-
         codebuild_client = session.client("codebuild")
+        project_name = self.get_image_build_project(codebuild_client, app, codebase)
         build_url = self.__start_build_with_confirmation(
             codebuild_client,
             self.get_build_url_from_arn,
             f'You are about to build "{app}" for "{codebase}" with commit "{commit}". Do you want to continue?',
             {
-                "projectName":
+                "projectName": project_name,
                 "artifactsOverride": {"type": "NO_ARTIFACTS"},
                 "sourceVersion": commit,
             },
@@ -142,30 +150,57 @@ class Codebase:
 
         raise ApplicationDeploymentNotTriggered(codebase)
 
-    def deploy(
+    def deploy(
+        self,
+        app: str,
+        env: str,
+        codebase: str,
+        commit: str = None,
+        tag: str = None,
+        branch: str = None,
+    ):
         """Trigger a CodePipeline pipeline based deployment."""
-        session = self.get_aws_session_or_abort()
 
-
-
-
+        self._validate_reference_flags(commit, tag, branch)
+
+        application, session = self._populate_application_values(app, env)
 
-
+        image_ref = None
+        if commit:
+            self._validate_sha_length(commit)
+            image_ref = f"commit-{commit}"
+        elif tag:
+            image_ref = f"tag-{tag}"
+        elif branch:
+            image_ref = f"branch-{branch}"
+
+        image_ref = self.ecr_provider.get_commit_tag_for_reference(
+            application.name, codebase, image_ref
+        )
 
-        pipeline_name = f"{app}-{codebase}-manual-release-pipeline"
         codepipeline_client = session.client("codepipeline")
+        pipeline_name = self.get_manual_release_pipeline(codepipeline_client, app, codebase)
+
+        corresponding_to = ""
+        if tag:
+            corresponding_to = f"(corresponding to tag {tag}) "
+        elif branch:
+            corresponding_to = f"(corresponding to branch {branch}) "
+
+        confirmation_message = f'\nYou are about to deploy "{app}" for "{codebase}" with image reference "{image_ref}" {corresponding_to}to the "{env}" environment using the "{pipeline_name}" deployment pipeline. Do you want to continue?'
+        build_options = {
+            "name": pipeline_name,
+            "variables": [
+                {"name": "ENVIRONMENT", "value": env},
+                {"name": "IMAGE_TAG", "value": image_ref},
+            ],
+        }
 
         build_url = self.__start_pipeline_execution_with_confirmation(
             codepipeline_client,
             self.get_build_url_from_pipeline_execution_id,
-
-
-            "name": pipeline_name,
-            "variables": [
-                {"name": "ENVIRONMENT", "value": env},
-                {"name": "IMAGE_TAG", "value": f"commit-{commit}"},
-            ],
-            },
+            confirmation_message,
+            build_options,
         )
 
         if build_url:
```
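The reworked `deploy` above accepts exactly one of a commit, tag or branch and turns it into a single image reference before triggering the manual release pipeline. A minimal sketch of those resolution rules in isolation (the standalone `resolve_image_ref` helper is illustrative only; in the package the result is additionally resolved to a commit tag via `ECRProvider.get_commit_tag_for_reference`):

```python
# Restates the reference rules enforced by Codebase.deploy: exactly one of
# commit/tag/branch may be given, and commit shas must be at least 7 characters.
def resolve_image_ref(commit: str = None, tag: str = None, branch: str = None) -> str:
    provided = [ref for ref in (commit, tag, branch) if ref]
    if len(provided) == 0:
        raise ValueError("Provide one of the options --commit, --tag or --branch.")
    if len(provided) > 1:
        raise ValueError("--commit, --tag and --branch are mutually exclusive.")
    if commit:
        if len(commit) < 7:
            raise ValueError("Commit sha hashes must be at least 7 characters long.")
        return f"commit-{commit}"
    return f"tag-{tag}" if tag else f"branch-{branch}"


print(resolve_image_ref(commit="abc1234"))  # commit-abc1234
print(resolve_image_ref(tag="1.2.3"))       # tag-1.2.3
```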
dbt_platform_helper/domain/codebase.py (continued):

```diff
@@ -176,13 +211,31 @@ class Codebase:
 
         raise ApplicationDeploymentNotTriggered(codebase)
 
+    def _validate_reference_flags(self, commit: str, tag: str, branch: str):
+        provided = [ref for ref in [commit, tag, branch] if ref]
+
+        if len(provided) == 0:
+            self.io.abort_with_error(
+                "To deploy, you must provide one of the options --commit, --tag or --branch."
+            )
+        elif len(provided) > 1:
+            self.io.abort_with_error(
+                "You have provided more than one of the --tag, --branch and --commit options but these are mutually exclusive. Please provide only one of these options."
+            )
+
+    def _populate_application_values(self, app: str, env: str) -> Tuple[Application, Session]:
+        session = self.get_aws_session_or_abort()
+        application = self.load_application(app, default_session=session)
+        if not application.environments.get(env):
+            raise ApplicationEnvironmentNotFoundException(application.name, env)
+        return application, session
+
     def list(self, app: str, with_images: bool):
         """List available codebases for the application."""
         session = self.get_aws_session_or_abort()
         application = self.load_application(app, session)
-        ssm_client = session.client("ssm")
         ecr_client = session.client("ecr")
-        codebases = self.__get_codebases(application,
+        codebases = self.__get_codebases(application, session.client("ssm"))
 
         self.io.info("The following codebases are available:")
 
@@ -199,11 +252,9 @@ class Codebase:
             self.io.info("")
 
     def __get_codebases(self, application, ssm_client):
-        parameters =
-
-
-        )["Parameters"]
-
+        parameters = self.parameter_provider.get_ssm_parameters_by_path(
+            f"/copilot/applications/{application.name}/codebases"
+        )
         codebases = [json.loads(p["Value"]) for p in parameters]
 
         if not codebases:
@@ -236,19 +287,18 @@ class Codebase:
             return get_build_url_from_pipeline_execution_id(execution_id, build_options["name"])
         return None
 
+    def _validate_sha_length(self, commit):
+        if len(commit) < 7:
+            self.io.abort_with_error(
+                "Your commit reference is too short. Commit sha hashes specified by '--commit' must be at least 7 characters long."
+            )
+
 
 class ApplicationDeploymentNotTriggered(PlatformException):
     def __init__(self, codebase: str):
         super().__init__(f"""Your deployment for {codebase} was not triggered.""")
 
 
-class ApplicationEnvironmentNotFoundException(ApplicationException):
-    def __init__(self, environment: str):
-        super().__init__(
-            f"""The environment "{environment}" either does not exist or has not been deployed."""
-        )
-
-
 class NotInCodeBaseRepositoryException(PlatformException):
     def __init__(self):
         super().__init__(
```
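`__get_codebases` now reads codebase registrations through the injected `ParameterStore` provider instead of a raw SSM client. For reference, the same listing can be reproduced with plain boto3; this is a hedged sketch against the public `get_parameters_by_path` API rather than the package's provider, and the application name is a placeholder:

```python
import json

import boto3

# Placeholder application name; the path mirrors the one used by Codebase.__get_codebases.
APPLICATION = "my-application"

ssm = boto3.client("ssm")
paginator = ssm.get_paginator("get_parameters_by_path")

codebases = []
for page in paginator.paginate(Path=f"/copilot/applications/{APPLICATION}/codebases"):
    for parameter in page["Parameters"]:
        # Each parameter value is a JSON document describing one registered codebase.
        codebases.append(json.loads(parameter["Value"]))

for codebase in codebases:
    print(codebase)
```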
dbt_platform_helper/domain/conduit.py:

```diff
@@ -1,119 +1,378 @@
-import
+import json
+from abc import ABC
+from abc import abstractmethod
+from typing import Callable
+from typing import Optional
 
 from dbt_platform_helper.providers.cloudformation import CloudFormation
+from dbt_platform_helper.providers.copilot import _normalise_secret_name
 from dbt_platform_helper.providers.copilot import connect_to_addon_client_task
 from dbt_platform_helper.providers.copilot import create_addon_client_task
+from dbt_platform_helper.providers.copilot import get_postgres_admin_connection_string
 from dbt_platform_helper.providers.ecs import ECS
 from dbt_platform_helper.providers.io import ClickIOProvider
 from dbt_platform_helper.providers.secrets import Secrets
+from dbt_platform_helper.providers.vpc import VpcProvider
 from dbt_platform_helper.utils.application import Application
 
 
-class
+class ConduitECSStrategy(ABC):
+    @abstractmethod
+    def get_data(self):
+        pass
+
+    @abstractmethod
+    def start_task(self, data_context: dict):
+        pass
+
+    @abstractmethod
+    def exec_task(self, data_context: dict):
+        pass
+
+
+class TerraformConduitStrategy(ConduitECSStrategy):
     def __init__(
         self,
+        clients,
+        ecs_provider: ECS,
         application: Application,
+        addon_name: str,
+        addon_type: str,
+        access: str,
+        env: str,
+        io: ClickIOProvider,
+        vpc_provider: Callable,
+        get_postgres_admin_connection_string: Callable,
+    ):
+        self.clients = clients
+        self.ecs_provider = ecs_provider
+        self.io = io
+        self.vpc_provider = vpc_provider
+        self.access = access
+        self.addon_name = addon_name
+        self.addon_type = addon_type
+        self.application = application
+        self.env = env
+        self.get_postgres_admin_connection_string = get_postgres_admin_connection_string
+
+    def get_data(self):
+        self.io.info("Starting conduit in Terraform mode.")
+        try:
+            cluster_arn = self.ecs_provider.get_cluster_arn_by_name(
+                f"{self.application.name}-{self.env}-cluster"
+            )
+        except:
+            cluster_arn = self.ecs_provider.get_cluster_arn_by_name(
+                f"{self.application.name}-{self.env}"
+            )
+        return {
+            "cluster_arn": cluster_arn,
+            "task_def_family": self._generate_container_name(),
+            "vpc_name": self._resolve_vpc_name(),
+            "addon_type": self.addon_type,
+            "access": self.access,
+        }
+
+    def start_task(self, data_context: dict):
+
+        environments = self.application.environments
+        environment = environments.get(self.env)
+        env_session = environment.session
+
+        vpc_provider = self.vpc_provider(env_session)
+        vpc_config = vpc_provider.get_vpc(
+            self.application.name,
+            self.env,
+            data_context["vpc_name"],
+        )
+
+        postgres_admin_env_vars = None
+        if data_context["addon_type"] == "postgres" and data_context["access"] == "admin":
+            postgres_admin_env_vars = [
+                {
+                    "name": "CONNECTION_SECRET",
+                    "value": self.get_postgres_admin_connection_string(
+                        self.clients.get("ssm"),
+                        f"/copilot/{self.application.name}/{self.env}/secrets/{_normalise_secret_name(self.addon_name)}",
+                        self.application,
+                        self.env,
+                        self.addon_name,
+                    ),
+                },
+            ]
+
+        cluster_name = data_context["cluster_arn"].split("/")[-1]
+
+        self.ecs_provider.start_ecs_task(
+            cluster_name,
+            self._generate_container_name(),
+            data_context["task_def_family"],
+            vpc_config,
+            postgres_admin_env_vars,
+        )
+
+    def exec_task(self, data_context: dict):
+        self.ecs_provider.exec_task(data_context["cluster_arn"], data_context["task_arns"][0])
+
+    def _generate_container_name(self):
+        return f"conduit-{self.addon_type}-{self.access}-{self.application.name}-{self.env}-{self.addon_name}"
+
+    def _resolve_vpc_name(self):
+        ssm_client = self.clients["ssm"]
+        parameter_key = f"/platform/applications/{self.application.name}/environments/{self.env}"
+
+        try:
+            response = ssm_client.get_parameter(Name=parameter_key)["Parameter"]["Value"]
+            return json.loads(response)["vpc_name"]
+        except ssm_client.exceptions.ParameterNotFound:
+            self.io.abort_with_error(
+                f"Could not find AWS SSM parameter {parameter_key}. Please ensure your environment Terraform is up to date."
+            )
+        except KeyError:
+            self.io.abort_with_error(
+                f"The parameter {parameter_key} exists but does not contain the 'vpc_name' field. Please ensure your environment Terraform is up to date."
+            )
+
+
+class CopilotConduitStrategy(ConduitECSStrategy):
+    def __init__(
+        self,
+        clients,
+        ecs_provider: ECS,
         secrets_provider: Secrets,
         cloudformation_provider: CloudFormation,
-
-
-
-
-
+        application: Application,
+        addon_name: str,
+        access: str,
+        env: str,
+        io: ClickIOProvider,
+        connect_to_addon_client_task: Callable,
+        create_addon_client_task: Callable,
     ):
-
-        self.application = application
-        self.secrets_provider = secrets_provider
+        self.clients = clients
         self.cloudformation_provider = cloudformation_provider
         self.ecs_provider = ecs_provider
-        self.
+        self.secrets_provider = secrets_provider
+
         self.io = io
+        self.access = access
+        self.addon_name = addon_name
+        self.application = application
+        self.env = env
         self.connect_to_addon_client_task = connect_to_addon_client_task
         self.create_addon_client_task = create_addon_client_task
 
-    def
-
-        addon_type
-
+    def get_data(self):
+
+        addon_type = self.secrets_provider.get_addon_type(self.addon_name)
+        parameter_name = self.secrets_provider.get_parameter_name(
+            addon_type, self.addon_name, self.access
         )
+        task_name = self.ecs_provider.get_or_create_task_name(self.addon_name, parameter_name)
 
-
-
-
-
-
-
-
-
-
-
-
+        return {
+            "cluster_arn": self.ecs_provider.get_cluster_arn_by_copilot_tag(),
+            "addon_type": addon_type,
+            "task_def_family": f"copilot-{task_name}",
+            "parameter_name": parameter_name,
+            "task_name": task_name,
+        }
+
+    def start_task(self, data_context: dict):
+        self.create_addon_client_task(
+            self.clients["iam"],
+            self.clients["ssm"],
+            self.application,
+            self.env,
+            data_context["addon_type"],
+            self.addon_name,
+            data_context["task_name"],
+            self.access,
+        )
+
+        self.io.info("Updating conduit task")
+        self.cloudformation_provider.add_stack_delete_policy_to_task_role(data_context["task_name"])
+        stack_name = self.cloudformation_provider.update_conduit_stack_resources(
+            self.application.name,
+            self.env,
+            data_context["addon_type"],
+            self.addon_name,
+            data_context["task_name"],
+            data_context["parameter_name"],
+            self.access,
+        )
+        self.io.info("Waiting for conduit task update to complete...")
+        self.cloudformation_provider.wait_for_cloudformation_to_reach_status(
+            "stack_update_complete", stack_name
+        )
+
+    def exec_task(self, data_context: dict):
+        self.connect_to_addon_client_task(
+            self.clients["ecs"],
+            self.application.name,
+            self.env,
+            data_context["cluster_arn"],
+            data_context["task_name"],
+        )
+
+
+class ConduitStrategyFactory:
+
+    @staticmethod
+    def detect_mode(
+        ecs_client,
+        application,
+        environment,
+        addon_name: str,
+        addon_type: str,
+        access: str,
+        io: ClickIOProvider,
+    ) -> str:
+        """Detect if Terraform-based conduit task definitions are present,
+        otherwise default to Copilot mode."""
+        paginator = ecs_client.get_paginator("list_task_definitions")
+        prefix = f"conduit-{addon_type}-{access}-{application}-{environment}-{addon_name}"
+
+        for page in paginator.paginate():
+            for arn in page["taskDefinitionArns"]:
+                if arn.split("/")[-1].startswith(prefix):
+                    return "terraform"
+
+        io.info("Defaulting to copilot mode.")
+        return "copilot"
+
+    @staticmethod
+    def create_strategy(
+        mode: str,
+        clients,
+        ecs_provider: ECS,
+        secrets_provider: Secrets,
+        cloudformation_provider: CloudFormation,
+        application: Application,
+        addon_name: str,
+        addon_type: str,
+        access: str,
+        env: str,
+        io: ClickIOProvider,
+    ):
+
+        if mode == "terraform":
+            return TerraformConduitStrategy(
+                clients,
+                ecs_provider,
+                application,
                 addon_name,
-
+                addon_type,
                 access,
-            )
-
-        self.io.info("Updating conduit task")
-        self._update_stack_resources(
-            self.application.name,
                 env,
-
+                io,
+                vpc_provider=VpcProvider,
+                get_postgres_admin_connection_string=get_postgres_admin_connection_string,
+            )
+        else:
+            return CopilotConduitStrategy(
+                clients,
+                ecs_provider,
+                secrets_provider,
+                cloudformation_provider,
+                application,
                 addon_name,
-                task_name,
-                parameter_name,
                 access,
+                env,
+                io,
+                connect_to_addon_client_task=connect_to_addon_client_task,
+                create_addon_client_task=create_addon_client_task,
             )
 
-        task_arns = self.ecs_provider.get_ecs_task_arns(cluster_arn, task_name)
 
-
-
+class Conduit:
+    def __init__(
+        self,
+        application: Application,
+        secrets_provider: Secrets,
+        cloudformation_provider: CloudFormation,
+        ecs_provider: ECS,
+        io: ClickIOProvider = ClickIOProvider(),
+        vpc_provider=VpcProvider,
+        strategy_factory: Optional[ConduitStrategyFactory] = None,
+    ):
 
-        self.
+        self.application = application
+        self.secrets_provider = secrets_provider
+        self.cloudformation_provider = cloudformation_provider
+        self.ecs_provider = ecs_provider
+        self.io = io
+        self.vpc_provider = vpc_provider
+        self.strategy_factory = strategy_factory or ConduitStrategyFactory()
+
+    def start(self, env: str, addon_name: str, access: str = "read"):
+        self.clients = self._initialise_clients(env)
+        addon_type = self.secrets_provider.get_addon_type(addon_name)
 
-
+        if (addon_type == "opensearch" or addon_type == "redis") and (access != "read"):
+            access = "read"
 
-        self.
-
-
+        mode = self.strategy_factory.detect_mode(
+            self.clients.get("ecs"),
+            self.application.name,
+            env,
+            addon_name,
+            addon_type,
+            access,
+            self.io,
+        )
+
+        strategy = self.strategy_factory.create_strategy(
+            mode=mode,
+            clients=self.clients,
+            ecs_provider=self.ecs_provider,
+            secrets_provider=self.secrets_provider,
+            cloudformation_provider=self.cloudformation_provider,
+            application=self.application,
+            addon_name=addon_name,
+            addon_type=addon_type,
+            access=access,
+            env=env,
+            io=self.io,
+        )
+
+        data_context = strategy.get_data()
+
+        data_context["task_arns"] = self.ecs_provider.get_ecs_task_arns(
+            cluster=data_context["cluster_arn"], task_def_family=data_context["task_def_family"]
+        )
+
+        info_log = (
+            f"Checking if a conduit ECS task is already running for:\n"
+            f"  Addon Name : {addon_name}\n"
+            f"  Addon Type : {addon_type}"
         )
 
+        if addon_type == "postgres":
+            info_log += f"\n  Access Level : {access}"
+
+        self.io.info(info_log)
+
+        if not data_context["task_arns"]:
+            self.io.info("Creating conduit ECS task...")
+            strategy.start_task(data_context)
+            data_context["task_arns"] = self.ecs_provider.wait_for_task_to_register(
+                data_context["cluster_arn"], data_context["task_def_family"]
+            )
+        else:
+            self.io.info(f"Found a task already running: {data_context['task_arns'][0]}")
+
+        self.io.info(f"Waiting for ECS Exec agent to become available on the conduit task...")
+        self.ecs_provider.ecs_exec_is_available(
+            data_context["cluster_arn"], data_context["task_arns"]
+        )
+
+        self.io.info("Connecting to conduit task...")
+        strategy.exec_task(data_context)
+
     def _initialise_clients(self, env):
         return {
             "ecs": self.application.environments[env].session.client("ecs"),
             "iam": self.application.environments[env].session.client("iam"),
             "ssm": self.application.environments[env].session.client("ssm"),
         }
-
-    def _get_addon_details(self, addon_name, access):
-        addon_type = self.secrets_provider.get_addon_type(addon_name)
-        cluster_arn = self.ecs_provider.get_cluster_arn()
-        parameter_name = self.secrets_provider.get_parameter_name(addon_type, addon_name, access)
-        task_name = self.ecs_provider.get_or_create_task_name(addon_name, parameter_name)
-
-        return addon_type, cluster_arn, parameter_name, task_name
-
-    def _update_stack_resources(
-        self,
-        app_name,
-        env,
-        addon_type,
-        addon_name,
-        task_name,
-        parameter_name,
-        access,
-    ):
-        self.cloudformation_provider.add_stack_delete_policy_to_task_role(task_name)
-        stack_name = self.cloudformation_provider.update_conduit_stack_resources(
-            app_name,
-            env,
-            addon_type,
-            addon_name,
-            task_name,
-            parameter_name,
-            access,
-        )
-        self.io.info("Waiting for conduit task update to complete...")
-        self.cloudformation_provider.wait_for_cloudformation_to_reach_status(
-            "stack_update_complete", stack_name
-        )
```