dbt-platform-helper 12.0.2__py3-none-any.whl → 12.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-platform-helper has been flagged as potentially problematic; see the registry's advisory page for more details.
- dbt_platform_helper/COMMANDS.md +7 -8
- dbt_platform_helper/commands/application.py +1 -0
- dbt_platform_helper/commands/codebase.py +63 -228
- dbt_platform_helper/commands/conduit.py +34 -409
- dbt_platform_helper/commands/secrets.py +1 -1
- dbt_platform_helper/constants.py +12 -1
- dbt_platform_helper/domain/codebase.py +222 -0
- dbt_platform_helper/domain/conduit.py +172 -0
- dbt_platform_helper/domain/database_copy.py +1 -1
- dbt_platform_helper/exceptions.py +61 -0
- dbt_platform_helper/providers/__init__.py +0 -0
- dbt_platform_helper/providers/cloudformation.py +105 -0
- dbt_platform_helper/providers/copilot.py +144 -0
- dbt_platform_helper/providers/ecs.py +78 -0
- dbt_platform_helper/providers/secrets.py +85 -0
- dbt_platform_helper/templates/addons/svc/prometheus-policy.yml +2 -0
- dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -1
- dbt_platform_helper/utils/application.py +1 -4
- dbt_platform_helper/utils/aws.py +132 -0
- dbt_platform_helper/utils/files.py +70 -0
- dbt_platform_helper/utils/git.py +13 -0
- dbt_platform_helper/utils/validation.py +121 -3
- {dbt_platform_helper-12.0.2.dist-info → dbt_platform_helper-12.2.0.dist-info}/METADATA +2 -1
- {dbt_platform_helper-12.0.2.dist-info → dbt_platform_helper-12.2.0.dist-info}/RECORD +27 -29
- {dbt_platform_helper-12.0.2.dist-info → dbt_platform_helper-12.2.0.dist-info}/WHEEL +1 -1
- dbt_platform_helper/templates/env/overrides/.gitignore +0 -12
- dbt_platform_helper/templates/env/overrides/README.md +0 -11
- dbt_platform_helper/templates/env/overrides/bin/override.ts +0 -9
- dbt_platform_helper/templates/env/overrides/cdk.json +0 -20
- dbt_platform_helper/templates/env/overrides/log_resource_policy.json +0 -68
- dbt_platform_helper/templates/env/overrides/package-lock.json +0 -4307
- dbt_platform_helper/templates/env/overrides/package.json +0 -27
- dbt_platform_helper/templates/env/overrides/stack.ts +0 -51
- dbt_platform_helper/templates/env/overrides/tsconfig.json +0 -32
- {dbt_platform_helper-12.0.2.dist-info → dbt_platform_helper-12.2.0.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-12.0.2.dist-info → dbt_platform_helper-12.2.0.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
|
|
4
|
+
from botocore.exceptions import ClientError
|
|
5
|
+
|
|
6
|
+
from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION
|
|
7
|
+
from dbt_platform_helper.exceptions import CreateTaskTimeoutError
|
|
8
|
+
from dbt_platform_helper.providers.ecs import get_ecs_task_arns
|
|
9
|
+
from dbt_platform_helper.providers.secrets import get_connection_secret_arn
|
|
10
|
+
from dbt_platform_helper.providers.secrets import (
|
|
11
|
+
get_postgres_connection_data_updated_with_master_secret,
|
|
12
|
+
)
|
|
13
|
+
from dbt_platform_helper.utils.application import Application
|
|
14
|
+
from dbt_platform_helper.utils.messages import abort_with_error
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def create_addon_client_task(
    iam_client,
    ssm_client,
    secrets_manager_client,
    subprocess,
    application: Application,
    env: str,
    addon_type: str,
    addon_name: str,
    task_name: str,
    access: str,
):
    """Start a copilot conduit client task for the given addon.

    Resolves the addon's connection secret, then shells out to
    ``copilot task run``.  Postgres admin access is delegated to
    create_postgres_admin_task, which needs the RDS master secret.
    """
    secret_name = f"/copilot/{application.name}/{env}/secrets/{_normalise_secret_name(addon_name)}"

    if addon_type == "postgres" and access == "admin":
        # Admin access is a special case: the connection data is assembled
        # from the RDS master secret rather than an SSM parameter.
        create_postgres_admin_task(
            ssm_client,
            secrets_manager_client,
            subprocess,
            application,
            addon_name,
            addon_type,
            env,
            secret_name,
            task_name,
        )
        return

    if addon_type == "postgres":
        if access == "read":
            secret_name += "_READ_ONLY_USER"
        elif access == "write":
            secret_name += "_APPLICATION_USER"
    elif addon_type in ("redis", "opensearch"):
        secret_name += "_ENDPOINT"

    role_name = f"{addon_name}-{application.name}-{env}-conduitEcsTask"

    try:
        iam_client.get_role(RoleName=role_name)
        execution_role = f"--execution-role {role_name} "
    except ClientError as ex:
        execution_role = ""
        # We cannot catch botocore.errorfactory.NoSuchEntityException directly:
        # botocore generates that class on the fly via errorfactory, so checking
        # the error code is the recommended way of handling these exceptions.
        error = ex.response.get("Error", {})
        if error.get("Code", None) != "NoSuchEntity":
            # TODO Raise an exception to be caught at the command layer
            abort_with_error(
                f"cannot obtain Role {role_name}: {error.get('Message', '')}"
            )

    subprocess.call(
        f"copilot task run --app {application.name} --env {env} "
        f"--task-group-name {task_name} "
        f"{execution_role}"
        f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} "
        f"--secrets CONNECTION_SECRET={get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name)} "
        "--platform-os linux "
        "--platform-arch arm64",
        shell=True,
    )
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def create_postgres_admin_task(
    ssm_client,
    secrets_manager_client,
    subprocess,
    app: Application,
    addon_name: str,
    addon_type: str,
    env: str,
    secret_name: str,
    task_name: str,
):
    """Start a conduit task with admin-level postgres credentials.

    Builds connection data from the read-only SSM parameter, overlaid with the
    master username/password held in Secrets Manager (looked up via the
    *_RDS_MASTER_ARN parameter), then passes the JSON-encoded result to
    ``copilot task run`` as an environment variable.
    """
    read_only_secret_name = secret_name + "_READ_ONLY_USER"
    master_secret_name = (
        f"/copilot/{app.name}/{env}/secrets/{_normalise_secret_name(addon_name)}_RDS_MASTER_ARN"
    )
    master_secret_arn = ssm_client.get_parameter(
        Name=master_secret_name, WithDecryption=True
    )["Parameter"]["Value"]

    connection_data = get_postgres_connection_data_updated_with_master_secret(
        ssm_client, secrets_manager_client, read_only_secret_name, master_secret_arn
    )
    connection_string = json.dumps(connection_data)

    subprocess.call(
        f"copilot task run --app {app.name} --env {env} "
        f"--task-group-name {task_name} "
        f"--image {CONDUIT_DOCKER_IMAGE_LOCATION}:{addon_type} "
        f"--env-vars CONNECTION_SECRET='{connection_string}' "
        "--platform-os linux "
        "--platform-arch arm64",
        shell=True,
    )
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def connect_to_addon_client_task(
    ecs_client,
    subprocess,
    application_name,
    env,
    cluster_arn,
    task_name,
    addon_client_is_running_fn=get_ecs_task_arns,
):
    """Wait for the conduit client task to start, then exec a shell into it.

    Polls ``addon_client_is_running_fn`` up to 15 times, one second apart.  As
    soon as the task is reported running, runs ``copilot task exec`` and
    returns immediately — the original implementation also slept for a further
    second after a successful exec, which was pure dead time.

    Raises:
        CreateTaskTimeoutError: if the task is not running after 15 polls.
    """
    for _ in range(15):
        if addon_client_is_running_fn(ecs_client, cluster_arn, task_name):
            subprocess.call(
                "copilot task exec "
                f"--app {application_name} --env {env} "
                f"--name {task_name} "
                "--command bash",
                shell=True,
            )
            return
        time.sleep(1)

    raise CreateTaskTimeoutError
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def _normalise_secret_name(addon_name: str) -> str:
|
|
144
|
+
return addon_name.replace("-", "_").upper()
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import random
|
|
2
|
+
import string
|
|
3
|
+
import time
|
|
4
|
+
from typing import List
|
|
5
|
+
|
|
6
|
+
from dbt_platform_helper.exceptions import ECSAgentNotRunning
|
|
7
|
+
from dbt_platform_helper.exceptions import NoClusterError
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_cluster_arn(ecs_client, application_name: str, env: str) -> str:
    """Return the ARN of the copilot ECS cluster for this application/environment.

    A cluster matches when its tags include the copilot application name, the
    copilot environment name, and the CloudFormation logical id "Cluster".

    Raises:
        NoClusterError: if no cluster carries all three expected tags.
    """
    required_tags = {
        ("copilot-application", application_name),
        ("copilot-environment", env),
        ("aws:cloudformation:logical-id", "Cluster"),
    }

    for cluster_arn in ecs_client.list_clusters()["clusterArns"]:
        tags = ecs_client.list_tags_for_resource(resourceArn=cluster_arn)["tags"]
        tag_pairs = {(tag["key"], tag["value"]) for tag in tags}
        if required_tags <= tag_pairs:
            return cluster_arn

    raise NoClusterError
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_or_create_task_name(
    ssm_client, application_name: str, env: str, addon_name: str, parameter_name: str
) -> str:
    """Return the conduit task name stored in SSM, or mint a fresh one.

    If ``parameter_name`` does not exist yet, a new name of the form
    ``conduit-<app>-<env>-<addon>-<12 random chars>`` is generated (but not
    persisted here).
    """
    try:
        return ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"]
    except ssm_client.exceptions.ParameterNotFound:
        suffix = "".join(random.choices(string.ascii_lowercase + string.digits, k=12))
        return f"conduit-{application_name}-{env}-{addon_name}-{suffix}"
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def get_ecs_task_arns(ecs_client, cluster_arn: str, task_name: str):
    """Return the ARNs of RUNNING tasks in the ``copilot-<task_name>`` family,
    or an empty list when none are running."""
    listing = ecs_client.list_tasks(
        cluster=cluster_arn,
        desiredStatus="RUNNING",
        family=f"copilot-{task_name}",
    )
    # An absent/empty taskArns list collapses to [] either way.
    return listing["taskArns"] or []
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def ecs_exec_is_available(ecs_client, cluster_arn: str, task_arns: List[str]):
    """Wait until the ExecuteCommandAgent on the first task reports RUNNING.

    Polls ``describe_tasks`` up to 25 times, one second apart.  Fixes from the
    previous version: the ``current_attemps`` typo, an IndexError when the
    ExecuteCommandAgent has not yet been attached to the container (now treated
    as "not running yet"), and a wasted one-second sleep after the agent was
    already RUNNING.

    Raises:
        ECSAgentNotRunning: if the agent never reaches RUNNING within 25 polls.
    """
    for attempt in range(25):
        if attempt:
            time.sleep(1)  # pause between polls, but not before the first one

        task_details = ecs_client.describe_tasks(cluster=cluster_arn, tasks=task_arns)
        managed_agents = task_details["tasks"][0]["containers"][0]["managedAgents"]
        agent_status = next(
            (
                agent["lastStatus"]
                for agent in managed_agents
                if agent["name"] == "ExecuteCommandAgent"
            ),
            "",
        )
        if agent_status == "RUNNING":
            return

    raise ECSAgentNotRunning
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import urllib
|
|
3
|
+
|
|
4
|
+
from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES
|
|
5
|
+
from dbt_platform_helper.exceptions import AddonNotFoundError
|
|
6
|
+
from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError
|
|
7
|
+
from dbt_platform_helper.exceptions import InvalidAddonTypeError
|
|
8
|
+
from dbt_platform_helper.exceptions import ParameterNotFoundError
|
|
9
|
+
from dbt_platform_helper.exceptions import SecretNotFoundError
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def get_postgres_connection_data_updated_with_master_secret(
    ssm_client, secrets_manager_client, parameter_name, secret_arn
):
    """Return postgres connection data with master credentials substituted in.

    Reads the JSON connection parameters from the SSM parameter
    ``parameter_name``, then overwrites ``username`` and ``password`` with the
    (URL-quoted) values from the Secrets Manager secret ``secret_arn``.
    """
    # The module only does `import urllib`, which does not guarantee that the
    # `urllib.parse` submodule is loaded; import the function explicitly.
    from urllib.parse import quote

    response = ssm_client.get_parameter(Name=parameter_name, WithDecryption=True)
    parameter_data = json.loads(response["Parameter"]["Value"])

    secret_response = secrets_manager_client.get_secret_value(SecretId=secret_arn)
    secret_value = json.loads(secret_response["SecretString"])

    # Quote the credentials so they can be embedded safely in a connection URL.
    parameter_data["username"] = quote(secret_value["username"])
    parameter_data["password"] = quote(secret_value["password"])

    return parameter_data
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def get_connection_secret_arn(ssm_client, secrets_manager_client, secret_name: str) -> str:
    """Resolve ``secret_name`` to an ARN.

    Checks SSM Parameter Store first, then falls back to Secrets Manager.

    Raises:
        SecretNotFoundError: if neither service holds the secret.
    """
    try:
        parameter = ssm_client.get_parameter(Name=secret_name, WithDecryption=False)
        return parameter["Parameter"]["ARN"]
    except ssm_client.exceptions.ParameterNotFound:
        pass

    try:
        secret = secrets_manager_client.describe_secret(SecretId=secret_name)
        return secret["ARN"]
    except secrets_manager_client.exceptions.ResourceNotFoundException:
        pass

    raise SecretNotFoundError(secret_name)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def get_addon_type(ssm_client, application_name: str, env: str, addon_name: str) -> str:
    """Look up the type of ``addon_name`` from the environment's addon config in SSM.

    Postgres engine variants (any type containing "postgres") are collapsed to
    the single type ``postgres``.

    Raises:
        ParameterNotFoundError: if the addons SSM parameter is missing.
        AddonNotFoundError: if the addon is not present in the config.
        AddonTypeMissingFromConfigError: if the addon entry has no "type" key.
        InvalidAddonTypeError: if the type is not a supported conduit type.
    """
    try:
        raw_config = ssm_client.get_parameter(
            Name=f"/copilot/applications/{application_name}/environments/{env}/addons"
        )["Parameter"]["Value"]
    except ssm_client.exceptions.ParameterNotFound:
        raise ParameterNotFoundError

    addon_config = json.loads(raw_config)

    if addon_name not in addon_config:
        raise AddonNotFoundError

    config = addon_config[addon_name]
    if not config.get("type"):
        raise AddonTypeMissingFromConfigError()
    addon_type = config["type"]

    if not addon_type or addon_type not in CONDUIT_ADDON_TYPES:
        raise InvalidAddonTypeError(addon_type)

    # Collapse engine-specific postgres types (e.g. aurora-postgres) to "postgres".
    if "postgres" in addon_type:
        addon_type = "postgres"

    return addon_type
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def get_parameter_name(
|
|
74
|
+
application_name: str, env: str, addon_type: str, addon_name: str, access: str
|
|
75
|
+
) -> str:
|
|
76
|
+
if addon_type == "postgres":
|
|
77
|
+
return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_{access.upper()}"
|
|
78
|
+
elif addon_type == "redis" or addon_type == "opensearch":
|
|
79
|
+
return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}_ENDPOINT"
|
|
80
|
+
else:
|
|
81
|
+
return f"/copilot/{application_name}/{env}/conduits/{_normalise_secret_name(addon_name)}"
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _normalise_secret_name(addon_name: str) -> str:
|
|
85
|
+
return addon_name.replace("-", "_").upper()
|
|
@@ -16,7 +16,6 @@ source:
|
|
|
16
16
|
provider: GitHub
|
|
17
17
|
# Additional properties that further specify the location of the artifacts.
|
|
18
18
|
properties:
|
|
19
|
-
# Todo: Allow for overriding this, but without risking deploying a branch to higher environments
|
|
20
19
|
branch: main
|
|
21
20
|
repository: https://github.com/{{ git_repo }}
|
|
22
21
|
connection_name: {{ app_name }}
|
|
@@ -8,6 +8,7 @@ import yaml
|
|
|
8
8
|
from boto3 import Session
|
|
9
9
|
from yaml.parser import ParserError
|
|
10
10
|
|
|
11
|
+
from dbt_platform_helper.exceptions import ApplicationNotFoundError
|
|
11
12
|
from dbt_platform_helper.utils.aws import get_aws_session_or_abort
|
|
12
13
|
from dbt_platform_helper.utils.aws import get_profile_name_from_account_id
|
|
13
14
|
from dbt_platform_helper.utils.aws import get_ssm_secrets
|
|
@@ -67,10 +68,6 @@ class Application:
|
|
|
67
68
|
return str(self) == str(other)
|
|
68
69
|
|
|
69
70
|
|
|
70
|
-
class ApplicationNotFoundError(Exception):
|
|
71
|
-
pass
|
|
72
|
-
|
|
73
|
-
|
|
74
71
|
def load_application(app: str = None, default_session: Session = None) -> Application:
|
|
75
72
|
application = Application(app if app else get_application_name())
|
|
76
73
|
current_session = default_session if default_session else get_aws_session_or_abort()
|
dbt_platform_helper/utils/aws.py
CHANGED
|
@@ -13,7 +13,12 @@ import yaml
|
|
|
13
13
|
from boto3 import Session
|
|
14
14
|
|
|
15
15
|
from dbt_platform_helper.exceptions import AWSException
|
|
16
|
+
from dbt_platform_helper.exceptions import CopilotCodebaseNotFoundError
|
|
17
|
+
from dbt_platform_helper.exceptions import ImageNotFoundError
|
|
16
18
|
from dbt_platform_helper.exceptions import ValidationException
|
|
19
|
+
from dbt_platform_helper.utils.files import cache_refresh_required
|
|
20
|
+
from dbt_platform_helper.utils.files import read_supported_versions_from_cache
|
|
21
|
+
from dbt_platform_helper.utils.files import write_to_cache
|
|
17
22
|
|
|
18
23
|
SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
|
|
19
24
|
SSM_PATH = "/copilot/{app}/{env}/secrets/{name}"
|
|
@@ -351,6 +356,59 @@ def get_postgres_connection_data_updated_with_master_secret(session, parameter_n
|
|
|
351
356
|
return parameter_data
|
|
352
357
|
|
|
353
358
|
|
|
359
|
+
def get_supported_redis_versions():
    """Return the redis engine versions supported by ElastiCache.

    Serves the locally cached list when it is still fresh; otherwise queries
    AWS and refreshes the cache.
    """
    if not cache_refresh_required("redis"):
        return read_supported_versions_from_cache("redis")

    session = get_aws_session_or_abort()
    elasticache_client = session.client("elasticache")

    response = elasticache_client.describe_cache_engine_versions(Engine="redis")
    supported_versions = [
        version["EngineVersion"] for version in response["CacheEngineVersions"]
    ]

    write_to_cache("redis", supported_versions)

    return supported_versions
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
def get_supported_opensearch_versions():
    """Return the OpenSearch engine versions (without the "OpenSearch_" prefix).

    Serves the locally cached list when it is still fresh; otherwise queries
    AWS, drops legacy Elasticsearch engine versions, and refreshes the cache.
    """
    if not cache_refresh_required("opensearch"):
        return read_supported_versions_from_cache("opensearch")

    session = get_aws_session_or_abort()
    opensearch_client = session.client("opensearch")

    all_versions = opensearch_client.list_versions()["Versions"]
    supported_versions = [
        version.removeprefix("OpenSearch_")
        for version in all_versions
        if not version.startswith("Elasticsearch_")
    ]

    write_to_cache("opensearch", supported_versions)

    return supported_versions
|
|
410
|
+
|
|
411
|
+
|
|
354
412
|
def get_connection_string(
|
|
355
413
|
session: Session,
|
|
356
414
|
app: str,
|
|
@@ -420,3 +478,77 @@ def get_vpc_info_by_name(session: Session, app: str, env: str, vpc_name: str) ->
|
|
|
420
478
|
raise AWSException(f"No matching security groups found in vpc '{vpc_name}'")
|
|
421
479
|
|
|
422
480
|
return Vpc(subnets, sec_groups)
|
|
481
|
+
|
|
482
|
+
|
|
483
|
+
def start_build_extraction(codebuild_client, build_options):
    """Start a CodeBuild build with ``build_options`` and return the build's ARN."""
    build = codebuild_client.start_build(**build_options)["build"]
    return build["arn"]
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
def check_codebase_exists(session: "Session", application, codebase: str):
    """Verify that ``codebase`` is registered for the application in SSM.

    Raises:
        CopilotCodebaseNotFoundError: if the codebase parameter is missing or
            its value cannot be read.
    """
    # Create the client outside the try block: the except clause references
    # ssm_client, which previously could be unbound if client construction
    # itself failed, and client-construction errors should not be masked as
    # "codebase not found".
    ssm_client = session.client("ssm")
    try:
        ssm_client.get_parameter(
            Name=f"/copilot/applications/{application.name}/codebases/{codebase}"
        )["Parameter"]["Value"]
    except (
        KeyError,
        ValueError,
        ssm_client.exceptions.ParameterNotFound,
    ):
        raise CopilotCodebaseNotFoundError
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
def check_image_exists(session, application, codebase, commit):
    """Confirm an ECR image tagged ``commit-<commit>`` exists for the codebase.

    Raises:
        ImageNotFoundError: if the repository or the tagged image is missing.
    """
    ecr_client = session.client("ecr")
    repository_name = f"{application.name}/{codebase}"
    try:
        ecr_client.describe_images(
            repositoryName=repository_name,
            imageIds=[{"imageTag": f"commit-{commit}"}],
        )
    except (
        ecr_client.exceptions.RepositoryNotFoundException,
        ecr_client.exceptions.ImageNotFoundException,
    ):
        raise ImageNotFoundError
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
def get_build_url_from_arn(build_arn: str) -> str:
    """Convert a CodeBuild build ARN into an AWS console URL for that build.

    ARN format: ``arn:aws:codebuild:<region>:<account>:build/<project>:<build-id>``.
    The previous version parsed the region from the ARN but then hard-coded
    eu-west-2 in the URL; the parsed region is now used.
    """
    _, _, _, region, account_id, project_name, build_id = build_arn.split(":")
    project_name = project_name.removeprefix("build/")
    return (
        f"https://{region}.console.aws.amazon.com/codesuite/codebuild/{account_id}/projects/"
        f"{project_name}/build/{project_name}%3A{build_id}"
    )
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
def list_latest_images(ecr_client, ecr_repository_name, codebase_repository, echo_fn):
    """Echo GitHub commit links for the most recently pushed commit-tagged images.

    Pages through all tagged images in the repository, orders them newest
    first, and for up to 20 of them emits a line linking the ``commit-*`` tag
    back to the source commit.  Images without a commit tag are skipped.
    """
    pages = ecr_client.get_paginator("describe_images").paginate(
        repositoryName=ecr_repository_name,
        filter={"tagStatus": "TAGGED"},
    )
    images = [detail for page in pages for detail in page["imageDetails"]]
    images.sort(key=lambda image: image["imagePushedAt"], reverse=True)

    MAX_RESULTS = 20

    for image in images[:MAX_RESULTS]:
        try:
            commit_tag = next(tag for tag in image["imageTags"] if tag.startswith("commit-"))
        except StopIteration:
            # No commit tag on this image — nothing to link to.
            continue
        commit_hash = commit_tag.replace("commit-", "")
        echo_fn(
            f" - https://github.com/{codebase_repository}/commit/{commit_hash} - published: {image['imagePushedAt']}"
        )
|
|
@@ -1,4 +1,6 @@
|
|
|
1
|
+
import os
|
|
1
2
|
from copy import deepcopy
|
|
3
|
+
from datetime import datetime
|
|
2
4
|
from os import makedirs
|
|
3
5
|
from pathlib import Path
|
|
4
6
|
|
|
@@ -7,6 +9,8 @@ import yaml
|
|
|
7
9
|
from jinja2 import Environment
|
|
8
10
|
from jinja2 import FileSystemLoader
|
|
9
11
|
|
|
12
|
+
from dbt_platform_helper.constants import PLATFORM_HELPER_CACHE_FILE
|
|
13
|
+
|
|
10
14
|
|
|
11
15
|
def to_yaml(value):
|
|
12
16
|
return yaml.dump(value, sort_keys=False)
|
|
@@ -102,3 +106,69 @@ def apply_environment_defaults(config):
|
|
|
102
106
|
enriched_config["environments"] = defaulted_envs
|
|
103
107
|
|
|
104
108
|
return enriched_config
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def read_supported_versions_from_cache(resource_name):
    """Return the cached list of supported versions for ``resource_name``.

    Callers are expected to have checked cache_refresh_required first, so the
    resource entry is assumed to be present in the cache file.
    """
    cached_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
    return cached_config.get(resource_name).get("versions")
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def write_to_cache(resource_name, supported_versions):
    """Record ``supported_versions`` for ``resource_name`` in the cache file.

    Existing entries for other resources are preserved; the entry is stamped
    with the current time so cache_refresh_required can age it out.
    """
    cached_config = (
        read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
        if os.path.exists(PLATFORM_HELPER_CACHE_FILE)
        else {}
    )

    cached_config[resource_name] = {
        "versions": supported_versions,
        "date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
    }

    with open(PLATFORM_HELPER_CACHE_FILE, "w") as file:
        file.write("# [!] This file is autogenerated via the platform-helper. Do not edit.\n")
        yaml.dump(cached_config, file)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def cache_refresh_required(resource_name) -> bool:
    """Decide whether supported-version data must be refetched from AWS.

    A refresh is required when any of the following hold:
      1. the cache file (.platform-helper-config.yml) does not exist;
      2. the cache has no entry for ``resource_name`` (e.g. redis, opensearch);
      3. the entry's date-retrieved is older than the interval (here, 1 day).
    """
    if not os.path.exists(PLATFORM_HELPER_CACHE_FILE):
        return True

    cached_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
    cached_resource = cached_config.get(resource_name)
    if not cached_resource:
        return True

    return check_if_cached_datetime_is_greater_than_interval(
        cached_resource.get("date-retrieved"), 1
    )
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def check_if_cached_datetime_is_greater_than_interval(date_retrieved, interval_in_days):
    """Return True when the cached timestamp is more than ``interval_in_days``
    full days old.

    ``date_retrieved`` must be in the cache's "%d-%m-%y %H:%M:%S" format.
    """
    cached_datetime = datetime.strptime(date_retrieved, "%d-%m-%y %H:%M:%S")
    age = datetime.now() - cached_datetime
    return age.days > interval_in_days
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def read_file_as_yaml(file_name):
    """Read ``file_name`` and return its contents parsed as YAML."""
    contents = Path(file_name).read_text()
    return yaml.safe_load(contents)
|
dbt_platform_helper/utils/git.py
CHANGED
|
@@ -2,6 +2,10 @@ import re
|
|
|
2
2
|
import subprocess
|
|
3
3
|
|
|
4
4
|
|
|
5
|
+
class CommitNotFoundError(Exception):
    """Raised by check_if_commit_exists when ``git branch -r --contains
    <commit>`` reports an error for the given commit."""
    pass
|
|
7
|
+
|
|
8
|
+
|
|
5
9
|
def git_remote():
|
|
6
10
|
git_repo = subprocess.run(
|
|
7
11
|
["git", "remote", "get-url", "origin"], capture_output=True, text=True
|
|
@@ -14,3 +18,12 @@ def extract_repository_name(repository_url):
|
|
|
14
18
|
return
|
|
15
19
|
|
|
16
20
|
return re.search(r"([^/:]*/[^/]*)\.git", repository_url).group(1)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def check_if_commit_exists(commit):
    """Raise CommitNotFoundError when git cannot resolve ``commit``.

    Runs ``git branch -r --contains <commit>`` and treats anything written to
    stderr as "commit not found".  Note: an empty stderr counts as success even
    when no remote branch actually contains the commit.
    """
    result = subprocess.run(
        ["git", "branch", "-r", "--contains", f"{commit}"], capture_output=True, text=True
    )

    if result.stderr:
        raise CommitNotFoundError()
|