dbt-platform-helper 13.1.0__py3-none-any.whl → 15.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt_platform_helper/COMMANDS.md +107 -27
- dbt_platform_helper/commands/application.py +5 -6
- dbt_platform_helper/commands/codebase.py +31 -10
- dbt_platform_helper/commands/conduit.py +3 -5
- dbt_platform_helper/commands/config.py +20 -311
- dbt_platform_helper/commands/copilot.py +18 -391
- dbt_platform_helper/commands/database.py +17 -9
- dbt_platform_helper/commands/environment.py +20 -14
- dbt_platform_helper/commands/generate.py +0 -3
- dbt_platform_helper/commands/internal.py +140 -0
- dbt_platform_helper/commands/notify.py +58 -78
- dbt_platform_helper/commands/pipeline.py +23 -19
- dbt_platform_helper/commands/secrets.py +39 -93
- dbt_platform_helper/commands/version.py +7 -12
- dbt_platform_helper/constants.py +52 -7
- dbt_platform_helper/domain/codebase.py +89 -39
- dbt_platform_helper/domain/conduit.py +335 -76
- dbt_platform_helper/domain/config.py +381 -0
- dbt_platform_helper/domain/copilot.py +398 -0
- dbt_platform_helper/domain/copilot_environment.py +8 -8
- dbt_platform_helper/domain/database_copy.py +2 -2
- dbt_platform_helper/domain/maintenance_page.py +254 -430
- dbt_platform_helper/domain/notify.py +64 -0
- dbt_platform_helper/domain/pipelines.py +43 -35
- dbt_platform_helper/domain/plans.py +41 -0
- dbt_platform_helper/domain/secrets.py +279 -0
- dbt_platform_helper/domain/service.py +570 -0
- dbt_platform_helper/domain/terraform_environment.py +14 -13
- dbt_platform_helper/domain/update_alb_rules.py +412 -0
- dbt_platform_helper/domain/versioning.py +249 -0
- dbt_platform_helper/{providers → entities}/platform_config_schema.py +75 -82
- dbt_platform_helper/entities/semantic_version.py +83 -0
- dbt_platform_helper/entities/service.py +339 -0
- dbt_platform_helper/platform_exception.py +4 -0
- dbt_platform_helper/providers/autoscaling.py +24 -0
- dbt_platform_helper/providers/aws/__init__.py +0 -0
- dbt_platform_helper/providers/aws/exceptions.py +70 -0
- dbt_platform_helper/providers/aws/interfaces.py +13 -0
- dbt_platform_helper/providers/aws/opensearch.py +23 -0
- dbt_platform_helper/providers/aws/redis.py +21 -0
- dbt_platform_helper/providers/aws/sso_auth.py +75 -0
- dbt_platform_helper/providers/cache.py +40 -4
- dbt_platform_helper/providers/cloudformation.py +1 -1
- dbt_platform_helper/providers/config.py +137 -19
- dbt_platform_helper/providers/config_validator.py +112 -51
- dbt_platform_helper/providers/copilot.py +24 -16
- dbt_platform_helper/providers/ecr.py +89 -7
- dbt_platform_helper/providers/ecs.py +228 -36
- dbt_platform_helper/providers/environment_variable.py +24 -0
- dbt_platform_helper/providers/files.py +1 -1
- dbt_platform_helper/providers/io.py +36 -4
- dbt_platform_helper/providers/kms.py +22 -0
- dbt_platform_helper/providers/load_balancers.py +402 -42
- dbt_platform_helper/providers/logs.py +72 -0
- dbt_platform_helper/providers/parameter_store.py +134 -0
- dbt_platform_helper/providers/s3.py +21 -0
- dbt_platform_helper/providers/schema_migrations/__init__.py +0 -0
- dbt_platform_helper/providers/schema_migrations/schema_v0_to_v1_migration.py +43 -0
- dbt_platform_helper/providers/schema_migrator.py +77 -0
- dbt_platform_helper/providers/secrets.py +5 -5
- dbt_platform_helper/providers/slack_channel_notifier.py +62 -0
- dbt_platform_helper/providers/terraform_manifest.py +121 -19
- dbt_platform_helper/providers/version.py +106 -23
- dbt_platform_helper/providers/version_status.py +27 -0
- dbt_platform_helper/providers/vpc.py +36 -5
- dbt_platform_helper/providers/yaml_file.py +58 -2
- dbt_platform_helper/templates/environment-pipelines/main.tf +4 -3
- dbt_platform_helper/templates/svc/overrides/cfn.patches.yml +5 -0
- dbt_platform_helper/utilities/decorators.py +103 -0
- dbt_platform_helper/utils/application.py +119 -22
- dbt_platform_helper/utils/aws.py +39 -150
- dbt_platform_helper/utils/deep_merge.py +10 -0
- dbt_platform_helper/utils/git.py +1 -14
- dbt_platform_helper/utils/validation.py +1 -1
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/METADATA +11 -20
- dbt_platform_helper-15.16.0.dist-info/RECORD +118 -0
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/WHEEL +1 -1
- platform_helper.py +3 -1
- terraform/elasticache-redis/plans.yml +85 -0
- terraform/opensearch/plans.yml +71 -0
- terraform/postgres/plans.yml +128 -0
- dbt_platform_helper/addon-plans.yml +0 -224
- dbt_platform_helper/providers/aws.py +0 -37
- dbt_platform_helper/providers/opensearch.py +0 -36
- dbt_platform_helper/providers/redis.py +0 -34
- dbt_platform_helper/providers/semantic_version.py +0 -126
- dbt_platform_helper/templates/svc/manifest-backend.yml +0 -69
- dbt_platform_helper/templates/svc/manifest-public.yml +0 -109
- dbt_platform_helper/utils/cloudfoundry.py +0 -14
- dbt_platform_helper/utils/files.py +0 -53
- dbt_platform_helper/utils/manifests.py +0 -18
- dbt_platform_helper/utils/versioning.py +0 -238
- dbt_platform_helper-13.1.0.dist-info/RECORD +0 -96
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/entry_points.txt +0 -0
- {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
from dbt_platform_helper.providers.slack_channel_notifier import SlackChannelNotifier
|
|
2
|
+
from dbt_platform_helper.utils.arn_parser import ARN
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Notify:
    """Posts and updates build-status messages in a Slack channel."""

    def __init__(self, notifier: SlackChannelNotifier):
        # Provider that performs the actual Slack API calls.
        self.notifier = notifier

    def post_message(
        self,
        message: str,
        build_arn: str = None,
        repository: str = None,
        commit_sha: str = None,
        original_message_ref: str = None,
    ):
        """Post *message* to Slack with repository, revision and build-log
        links attached as context where supplied.

        When ``original_message_ref`` is given the existing Slack message is
        updated in place; otherwise a new message is posted. Returns whatever
        the underlying notifier call returns.
        """
        context_elements = []

        if repository:
            context_elements.append(
                f"*Repository*: <https://github.com/{repository}|{repository}>"
            )
            # A revision link only makes sense alongside a repository.
            if commit_sha:
                context_elements.append(
                    f"*Revision*: <https://github.com/{repository}/commit/{commit_sha}|{commit_sha}>"
                )

        if build_arn:
            context_elements.append(f"<{get_build_url(build_arn)}|Build Logs>")

        if not original_message_ref:
            return self.notifier.post_new(message, context_elements)
        return self.notifier.post_update(original_message_ref, message, context_elements)

    def add_comment(
        self,
        original_message_ref: str,
        message: str,
        title: str,
        reply_broadcast: bool,
    ):
        """Post *message* as a threaded reply to an existing Slack message,
        optionally broadcasting the reply into the channel."""
        self.notifier.post_new(
            message=message,
            title=title,
            context=[],
            reply_broadcast=reply_broadcast,
            thread_ref=original_message_ref,
        )
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def get_build_url(build_arn: str):
    """Return the AWS console URL for the CodeBuild build identified by
    *build_arn*, or an empty string when the ARN cannot be parsed."""
    try:
        parsed = ARN(build_arn)
        # The ARN resource is "build/<project>"; the console URL wants the bare project name.
        project_name = parsed.project.replace("build/", "")
        return (
            f"https://{parsed.region}.console.aws.amazon.com/codesuite/codebuild/"
            f"{parsed.account_id}/projects/{project_name}/build/{project_name}%3A{parsed.build_id}"
        )
    except ValueError:
        return ""
|
|
@@ -7,16 +7,13 @@ from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
|
|
|
7
7
|
from dbt_platform_helper.constants import ENVIRONMENT_PIPELINES_KEY
|
|
8
8
|
from dbt_platform_helper.constants import SUPPORTED_AWS_PROVIDER_VERSION
|
|
9
9
|
from dbt_platform_helper.constants import SUPPORTED_TERRAFORM_VERSION
|
|
10
|
+
from dbt_platform_helper.domain.versioning import PlatformHelperVersioning
|
|
10
11
|
from dbt_platform_helper.providers.config import ConfigProvider
|
|
11
12
|
from dbt_platform_helper.providers.ecr import ECRProvider
|
|
12
13
|
from dbt_platform_helper.providers.files import FileProvider
|
|
13
14
|
from dbt_platform_helper.providers.io import ClickIOProvider
|
|
14
15
|
from dbt_platform_helper.providers.terraform_manifest import TerraformManifestProvider
|
|
15
|
-
from dbt_platform_helper.utils.application import get_application_name
|
|
16
16
|
from dbt_platform_helper.utils.template import setup_templates
|
|
17
|
-
from dbt_platform_helper.utils.versioning import (
|
|
18
|
-
get_required_terraform_platform_modules_version,
|
|
19
|
-
)
|
|
20
17
|
|
|
21
18
|
|
|
22
19
|
class Pipelines:
|
|
@@ -26,19 +23,43 @@ class Pipelines:
|
|
|
26
23
|
terraform_manifest_provider: TerraformManifestProvider,
|
|
27
24
|
ecr_provider: ECRProvider,
|
|
28
25
|
get_git_remote: Callable[[], str],
|
|
29
|
-
get_codestar_arn: Callable[[str], str],
|
|
30
26
|
io: ClickIOProvider = ClickIOProvider(),
|
|
31
27
|
file_provider: FileProvider = FileProvider(),
|
|
28
|
+
platform_helper_versioning: PlatformHelperVersioning = None,
|
|
32
29
|
):
|
|
33
30
|
self.config_provider = config_provider
|
|
34
31
|
self.get_git_remote = get_git_remote
|
|
35
|
-
self.get_codestar_arn = get_codestar_arn
|
|
36
32
|
self.terraform_manifest_provider = terraform_manifest_provider
|
|
37
33
|
self.ecr_provider = ecr_provider
|
|
38
34
|
self.io = io
|
|
39
35
|
self.file_provider = file_provider
|
|
36
|
+
self.platform_helper_versioning = platform_helper_versioning
|
|
37
|
+
|
|
38
|
+
def _map_environment_pipeline_accounts(self, platform_config) -> list[tuple[str, str]]:
    """Return unique (account name, account id) pairs for every environment
    pipeline declared in the platform config.

    The account id is resolved from the environments' deploy-account details
    and is None when no matching environment declares that account.
    """
    pipelines = platform_config[ENVIRONMENT_PIPELINES_KEY]
    environments = platform_config["environments"]

    # Deploy account name -> deploy account id, for environments that declare one.
    deploy_account_ids = {
        env["accounts"]["deploy"]["name"]: env["accounts"]["deploy"]["id"]
        for env in environments.values()
        if env is not None and "accounts" in env and "deploy" in env["accounts"]
    }

    unique_accounts = {
        (pipeline.get("account"), deploy_account_ids.get(pipeline.get("account")))
        for pipeline in pipelines.values()
    }

    return list(unique_accounts)
|
|
56
|
+
|
|
57
|
+
def generate(
|
|
58
|
+
self,
|
|
59
|
+
deploy_branch: str,
|
|
60
|
+
):
|
|
61
|
+
self.platform_helper_versioning.check_platform_helper_version_mismatch()
|
|
40
62
|
|
|
41
|
-
def generate(self, cli_terraform_platform_modules_version: str, deploy_branch: str):
|
|
42
63
|
platform_config = self.config_provider.load_and_validate_platform_config()
|
|
43
64
|
|
|
44
65
|
has_codebase_pipelines = CODEBASE_PIPELINES_KEY in platform_config
|
|
@@ -48,31 +69,16 @@ class Pipelines:
|
|
|
48
69
|
self.io.warn("No pipelines defined: nothing to do.")
|
|
49
70
|
return
|
|
50
71
|
|
|
51
|
-
platform_config_terraform_modules_default_version = platform_config.get(
|
|
52
|
-
"default_versions", {}
|
|
53
|
-
).get("terraform-platform-modules", "")
|
|
54
|
-
|
|
55
|
-
app_name = get_application_name()
|
|
56
|
-
|
|
57
72
|
git_repo = self.get_git_remote()
|
|
58
73
|
if not git_repo:
|
|
59
74
|
self.io.abort_with_error("The current directory is not a git repository")
|
|
60
75
|
|
|
61
|
-
codestar_connection_arn = self.get_codestar_arn(app_name)
|
|
62
|
-
if codestar_connection_arn is None:
|
|
63
|
-
self.io.abort_with_error(f'There is no CodeStar Connection named "{app_name}" to use')
|
|
64
|
-
|
|
65
76
|
base_path = Path(".")
|
|
66
77
|
copilot_pipelines_dir = base_path / f"copilot/pipelines"
|
|
67
78
|
|
|
68
79
|
self._clean_pipeline_config(copilot_pipelines_dir)
|
|
69
80
|
|
|
70
|
-
|
|
71
|
-
cli_terraform_platform_modules_version,
|
|
72
|
-
platform_config_terraform_modules_default_version,
|
|
73
|
-
)
|
|
74
|
-
|
|
75
|
-
# TODO - this whole code block/if-statement can fall away once the deploy_repository is a required key.
|
|
81
|
+
# TODO: DBTP-1965: - this whole code block/if-statement can fall away once the deploy_repository is a required key.
|
|
76
82
|
deploy_repository = ""
|
|
77
83
|
if "deploy_repository" in platform_config.keys():
|
|
78
84
|
deploy_repository = f"{platform_config['deploy_repository']}"
|
|
@@ -83,20 +89,17 @@ class Pipelines:
|
|
|
83
89
|
deploy_repository = f"uktrade/{platform_config['application']}-deploy"
|
|
84
90
|
|
|
85
91
|
if has_environment_pipelines:
|
|
86
|
-
|
|
87
|
-
accounts = {
|
|
88
|
-
config.get("account")
|
|
89
|
-
for config in environment_pipelines.values()
|
|
90
|
-
if "account" in config
|
|
91
|
-
}
|
|
92
|
+
accounts = self._map_environment_pipeline_accounts(platform_config)
|
|
92
93
|
|
|
93
|
-
for
|
|
94
|
+
for account_name, account_id in accounts:
|
|
94
95
|
self._generate_terraform_environment_pipeline_manifest(
|
|
95
96
|
platform_config["application"],
|
|
96
97
|
deploy_repository,
|
|
97
|
-
|
|
98
|
-
|
|
98
|
+
account_name,
|
|
99
|
+
self.platform_helper_versioning.get_environment_pipeline_modules_source(),
|
|
99
100
|
deploy_branch,
|
|
101
|
+
account_id,
|
|
102
|
+
self.platform_helper_versioning.get_pinned_version(),
|
|
100
103
|
)
|
|
101
104
|
|
|
102
105
|
if has_codebase_pipelines:
|
|
@@ -114,9 +117,10 @@ class Pipelines:
|
|
|
114
117
|
|
|
115
118
|
self.terraform_manifest_provider.generate_codebase_pipeline_config(
|
|
116
119
|
platform_config,
|
|
117
|
-
|
|
120
|
+
self.platform_helper_versioning.get_template_version(),
|
|
118
121
|
ecrs_that_need_importing,
|
|
119
122
|
deploy_repository,
|
|
123
|
+
self.platform_helper_versioning.get_codebase_pipeline_modules_source(),
|
|
120
124
|
)
|
|
121
125
|
|
|
122
126
|
def _clean_pipeline_config(self, pipelines_dir: Path):
|
|
@@ -129,8 +133,10 @@ class Pipelines:
|
|
|
129
133
|
application: str,
|
|
130
134
|
deploy_repository: str,
|
|
131
135
|
aws_account: str,
|
|
132
|
-
|
|
136
|
+
module_source: str,
|
|
133
137
|
deploy_branch: str,
|
|
138
|
+
aws_account_id: str,
|
|
139
|
+
pinned_version: str,
|
|
134
140
|
):
|
|
135
141
|
env_pipeline_template = setup_templates().get_template("environment-pipelines/main.tf")
|
|
136
142
|
|
|
@@ -139,10 +145,12 @@ class Pipelines:
|
|
|
139
145
|
"application": application,
|
|
140
146
|
"deploy_repository": deploy_repository,
|
|
141
147
|
"aws_account": aws_account,
|
|
142
|
-
"
|
|
148
|
+
"module_source": module_source,
|
|
143
149
|
"deploy_branch": deploy_branch,
|
|
144
150
|
"terraform_version": SUPPORTED_TERRAFORM_VERSION,
|
|
145
151
|
"aws_provider_version": SUPPORTED_AWS_PROVIDER_VERSION,
|
|
152
|
+
"deploy_account_id": aws_account_id,
|
|
153
|
+
"pinned_version": pinned_version,
|
|
146
154
|
}
|
|
147
155
|
)
|
|
148
156
|
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from dbt_platform_helper.providers.yaml_file import YamlFileProvider
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class PlanLoader:
    """Loads the available resource plans for each supported extension from
    its terraform module's plans.yml file, caching each file per instance."""

    # Repository root: three levels up from this module.
    PROJECT_DIR = Path(__file__).resolve().parent.parent.parent

    def __init__(
        self,
        extensions: dict = None,
        terraform_dir: str = "terraform",
        loader: YamlFileProvider = YamlFileProvider,
    ):
        self.path = terraform_dir
        self.loader = loader
        self._cache = {}
        # Maps extension type -> terraform module directory name.
        default_extensions = {
            "redis": "elasticache-redis",
            "opensearch": "opensearch",
            "postgres": "postgres",
        }
        self.extensions = extensions or default_extensions

    def load(self):
        """Return a mapping of extension type to its parsed plans."""
        return {
            extension: self._load_plan(
                extension, f"{self.PROJECT_DIR}/{self.path}/{directory}/plans.yml"
            )
            for extension, directory in self.extensions.items()
        }

    def _load_plan(self, name, path):
        # Each plans.yml is parsed at most once per PlanLoader instance.
        if name not in self._cache:
            self._cache[name] = self.loader.load(path)
        return self._cache[name]

    def get_plan_names(self, extension):
        """Return the plan names available for the given extension type."""
        return list(self.load()[extension].keys())
|
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
import botocore
|
|
2
|
+
|
|
3
|
+
from dbt_platform_helper.constants import MANAGED_BY_PLATFORM
|
|
4
|
+
from dbt_platform_helper.constants import MANAGED_BY_PLATFORM_TERRAFORM
|
|
5
|
+
from dbt_platform_helper.platform_exception import PlatformException
|
|
6
|
+
from dbt_platform_helper.providers.io import ClickIOProvider
|
|
7
|
+
from dbt_platform_helper.providers.parameter_store import Parameter
|
|
8
|
+
from dbt_platform_helper.providers.parameter_store import ParameterStore
|
|
9
|
+
from dbt_platform_helper.utils.application import load_application
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Secrets:
    """Manages AWS Parameter Store secrets for an application: interactive
    creation across all environments, and copying secrets between
    environments."""

    def __init__(
        self,
        load_application=load_application,
        io: ClickIOProvider = ClickIOProvider(),
        parameter_store_provider: ParameterStore = ParameterStore,
    ):
        # Factory used to lazily load the Application on first use.
        self.load_application_fn = load_application
        self.application = None
        self.io = io
        # A class, not an instance: instantiated per environment with that
        # environment's SSM client.
        self.parameter_store_provider: ParameterStore = parameter_store_provider

    def _check_ssm_write_access(self, accounts):
        """Raise a PlatformException when the caller's SSO role lacks
        ssm:PutParameter in any of the given accounts.

        *accounts* maps AWS account id -> boto3 session for that account.
        """
        no_access = []
        for account, session in accounts.items():
            sts = session.client("sts")
            iam = session.client("iam")

            sts_arn = sts.get_caller_identity()["Arn"]
            # Assumed-role ARNs look like .../<role-name>/<session-name>;
            # the second path segment is the role name.
            role_name = sts_arn.split("/")[1]

            role_arn = (
                f"arn:aws:iam::{account}:role/aws-reserved/sso.amazonaws.com/eu-west-2/{role_name}"
            )
            response = iam.simulate_principal_policy(
                PolicySourceArn=role_arn,
                ActionNames=[
                    "ssm:PutParameter",
                ],
                ContextEntries=[
                    {
                        "ContextKeyName": "aws:RequestedRegion",
                        "ContextKeyValues": [
                            "eu-west-2",
                        ],
                        "ContextKeyType": "string",
                    }
                ],
            )["EvaluationResults"]

            # Access is sufficient if any evaluation result allows the action.
            if not any(result["EvalDecision"] == "allowed" for result in response):
                no_access.append(account)

        if no_access:
            account_ids = "', '".join(no_access)
            raise PlatformException(
                f"You do not have AWS Parameter Store write access to the following AWS accounts: '{account_ids}'"
            )

    def _check_for_existing_params(self, get_secret_name):
        """Return the names of environments in which the secret produced by
        *get_secret_name* already exists in Parameter Store."""
        found_params = []
        for environment in self.application.environments.values():
            parameter_store: ParameterStore = self.parameter_store_provider(
                environment.session.client("ssm")
            )
            try:
                param = parameter_store.get_ssm_parameter_by_name(get_secret_name(environment.name))
                if param:
                    found_params.append(environment.name)
            except botocore.exceptions.ClientError as error:
                # A missing parameter is the expected case; anything else is fatal.
                if error.response["Error"]["Code"] == "ParameterNotFound":
                    pass
                else:
                    raise PlatformException(error) from error

        return found_params

    def create(self, app_name, name, overwrite):
        """Interactively create (or overwrite) the secret *name* in every
        environment of *app_name*.

        Prompts for one value per environment, then writes each as a
        SecureString under /platform/<app>/<env>/secrets/<NAME>.

        Raises PlatformException when the caller lacks SSM write access, or
        when the secret already exists and *overwrite* is false.
        """
        self.application = (
            self.load_application_fn(app_name) if not self.application else self.application
        )

        # De-duplicate sessions per AWS account so access is checked once per account.
        accounts = {}
        for environment in self.application.environments.values():
            if environment.account_id not in accounts:
                accounts[environment.account_id] = environment.session

        self._check_ssm_write_access(accounts)

        def get_secret_name(env):
            return f"/platform/{app_name}/{env}/secrets/{name.upper()}"

        found_params = self._check_for_existing_params(get_secret_name)

        if overwrite is False and found_params:
            envs = "', '".join(found_params)
            raise PlatformException(
                f"AWS Parameter Store secret '{name.upper()}' already exists for the following environments: '{envs}'. \nUse the --overwrite flag to replace existing secret values."
            )

        # Collect all values up front so a prompt abort writes nothing.
        values = {}
        for environment in self.application.environments.values():
            values[environment.name] = self.io.input(
                f"Please enter value for secret '{name.upper()}' in environment '{environment.name}'",
                hide_input=True,
            )

        for environment_name, secret_value in values.items():
            environment = self.application.environments[environment_name]
            parameter_store: ParameterStore = self.parameter_store_provider(
                environment.session.client("ssm")
            )

            data_dict = dict(
                Name=get_secret_name(environment.name),
                Value=secret_value,
                Overwrite=overwrite,
                Type="SecureString",
                Tags=[
                    {"Key": "application", "Value": app_name},
                    {"Key": "environment", "Value": environment.name},
                    {"Key": "managed-by", "Value": MANAGED_BY_PLATFORM},
                ],
            )

            # SSM PutParameter rejects Tags when Overwrite=True, so drop the
            # tags when updating an existing parameter.
            if overwrite and environment_name in found_params:
                data_dict["Overwrite"] = True
                del data_dict["Tags"]
            self.io.debug(
                f"Creating AWS Parameter Store secret {get_secret_name(environment.name)} ..."
            )
            parameter_store.put_parameter(data_dict)
            self.io.debug(
                f"Successfully created AWS Parameter Store secret {get_secret_name(environment.name)}"
            )

        self.io.info(
            "\nTo check or update your secrets, log into your AWS account via the Console and visit the Parameter Store https://eu-west-2.console.aws.amazon.com/systems-manager/parameters/\n"
            "You can attach secrets into ECS container by adding them to the `secrets` section of your 'service-config.yml' file."
        )

        self.io.info(
            message=f"```\nsecrets:\n\t{name.upper()}: /platform/${{PLATFORM_APPLICATION_NAME}}/${{PLATFORM_ENVIRONMENT_NAME}}/secrets/{name.upper()}\n```",
            fg="cyan",
            bold=True,
        )

    def __has_access(self, env, actions=None, access_type="write"):
        """Raise a PlatformException when the caller's SSO role is not
        allowed the given SSM *actions* in *env*'s account.

        *access_type* is only used in the error message.
        """
        # Avoid a mutable default argument; default to write access.
        if actions is None:
            actions = ["ssm:PutParameter"]

        sts_arn = env.session.client("sts").get_caller_identity()["Arn"]
        role_name = sts_arn.split("/")[1]

        role_arn = f"arn:aws:iam::{env.account_id}:role/aws-reserved/sso.amazonaws.com/eu-west-2/{role_name}"
        response = env.session.client("iam").simulate_principal_policy(
            PolicySourceArn=role_arn,
            ActionNames=actions,
            ContextEntries=[
                {
                    "ContextKeyName": "aws:RequestedRegion",
                    "ContextKeyValues": [
                        "eu-west-2",
                    ],
                    "ContextKeyType": "string",
                }
            ],
        )["EvaluationResults"]

        if not any(result["EvalDecision"] == "allowed" for result in response):
            raise PlatformException(
                f"You do not have AWS Parameter Store {access_type} access to the following AWS accounts: '{env.account_id}'"
            )

    def copy(self, app_name: str, source: str, target: str):
        """Copy AWS Parameter Store secrets from one environment into
        another.

        Skips AWS-managed and terraform-managed parameters, refuses to copy
        secrets out of the production account, and never overwrites existing
        parameters in the target environment.
        """

        self.application = (
            self.load_application_fn(app_name) if not self.application else self.application
        )

        if not self.application.environments.get(target, ""):
            raise PlatformException(
                f"Environment '{target}' not found for application '{app_name}'."
            )
        elif not self.application.environments.get(source, ""):
            raise PlatformException(
                f"Environment '{source}' not found for application '{app_name}'."
            )

        source_env = self.application.environments.get(source)
        target_env = self.application.environments.get(target)

        self.__has_access(source_env, actions=["ssm:GetParameter"], access_type="read")
        self.__has_access(target_env)

        # Never allow secrets to leave the production account.
        # NOTE(review): assumes a 'prod' environment always exists — a
        # missing one raises KeyError here; confirm against callers.
        prod_account_id = self.application.environments["prod"].account_id
        if (
            source_env.account_id == prod_account_id
            and target_env.account_id != prod_account_id
        ):
            raise PlatformException(
                f"Cannot transfer secrets out from '{source}' in the prod account '{prod_account_id}'"
                f" to '{target}' in '{target_env.account_id}'"
            )

        parameter_store: ParameterStore = self.parameter_store_provider(
            source_env.session.client("ssm"), with_model=True
        )

        target_parameter_store: ParameterStore = self.parameter_store_provider(
            target_env.session.client("ssm"), with_model=True
        )

        # Secrets may live under either the legacy copilot path or the platform path.
        copilot_secrets: list[Parameter] = parameter_store.get_ssm_parameters_by_path(
            f"/copilot/{app_name}/{source}/secrets", add_tags=True
        )
        platform_secrets: list[Parameter] = parameter_store.get_ssm_parameters_by_path(
            f"/platform/{app_name}/{source}/secrets", add_tags=True
        )

        secrets = copilot_secrets + platform_secrets

        for secret in secrets:
            secret.name = secret.name.replace(f"/{source}/", f"/{target}/")

            if (
                "AWS_" in secret.name
                or secret.tags.get("managed-by", "") == MANAGED_BY_PLATFORM_TERRAFORM
                or secret.tags.get("managed-by", "")
                == "Terraform"  # SSM params POSTGRES_APPLICATION_USER and POSTGRES_READ_ONLY_USER are tagged differently from the rest
            ):
                message = f"Skipping AWS Parameter Store secret {secret.name}"
                if secret.tags.get("managed-by", ""):
                    managed_by = secret.tags["managed-by"]
                    message += f" with managed-by: {managed_by}"
                self.io.debug(message)
                continue

            secret.tags["application"] = app_name
            secret.tags["environment"] = target
            secret.tags["managed-by"] = MANAGED_BY_PLATFORM
            secret.tags["copied-from"] = source

            if secret.name.startswith("/copilot/"):
                secret.tags["copilot-environment"] = target

            data_dict = dict(
                Name=secret.name,
                Value=secret.value,
                Overwrite=False,
                Type=secret.type,
                Description=f"Copied from {source} environment.",
                Tags=secret.tags_to_list(),
            )
            self.io.debug(f"Creating AWS Parameter Store secret {secret.name} ...")

            try:
                target_parameter_store.put_parameter(data_dict)
                secret_name = secret.name.split("/")[-1]
                self.io.info(
                    f"Secret {secret_name} was successfully copied from the '{source}' environment to '{target}'"
                )
            except botocore.exceptions.ClientError as e:
                # Overwrite=False makes an existing parameter a warning, not an error.
                if e.response["Error"]["Code"] == "ParameterAlreadyExists":
                    self.io.warn(
                        f"""The "{secret.name.split("/")[-1]}" parameter already exists for the "{target}" environment.""",
                    )
                else:
                    raise PlatformException(e) from e
|