dbt-platform-helper 12.4.1__py3-none-any.whl → 12.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-platform-helper might be problematic. Click here for more details.
- dbt_platform_helper/COMMANDS.md +1 -6
- dbt_platform_helper/commands/config.py +2 -2
- dbt_platform_helper/commands/copilot.py +51 -30
- dbt_platform_helper/commands/environment.py +25 -185
- dbt_platform_helper/commands/pipeline.py +10 -173
- dbt_platform_helper/constants.py +10 -0
- dbt_platform_helper/domain/codebase.py +8 -4
- dbt_platform_helper/domain/config_validator.py +242 -0
- dbt_platform_helper/domain/copilot_environment.py +204 -0
- dbt_platform_helper/domain/database_copy.py +16 -12
- dbt_platform_helper/domain/maintenance_page.py +44 -20
- dbt_platform_helper/domain/pipelines.py +213 -0
- dbt_platform_helper/domain/terraform_environment.py +86 -0
- dbt_platform_helper/domain/test_platform_terraform_manifest_generator.py +100 -0
- dbt_platform_helper/jinja2_tags.py +1 -1
- dbt_platform_helper/providers/cache.py +14 -21
- dbt_platform_helper/providers/cloudformation.py +0 -1
- dbt_platform_helper/providers/config.py +100 -0
- dbt_platform_helper/providers/copilot.py +2 -0
- dbt_platform_helper/providers/files.py +26 -0
- dbt_platform_helper/providers/opensearch.py +36 -0
- dbt_platform_helper/providers/platform_config_schema.py +589 -527
- dbt_platform_helper/providers/redis.py +34 -0
- dbt_platform_helper/providers/vpc.py +57 -0
- dbt_platform_helper/providers/yaml_file.py +72 -0
- dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
- dbt_platform_helper/utils/application.py +32 -34
- dbt_platform_helper/utils/aws.py +1 -107
- dbt_platform_helper/utils/files.py +8 -59
- dbt_platform_helper/utils/platform_config.py +0 -7
- dbt_platform_helper/utils/template.py +10 -0
- dbt_platform_helper/utils/validation.py +5 -327
- dbt_platform_helper/utils/versioning.py +12 -0
- {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/METADATA +2 -2
- {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/RECORD +38 -26
- {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/WHEEL +1 -1
- {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.1.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from dbt_platform_helper.providers.cache import CacheProvider
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class RedisProvider:
    """Fetches the Redis engine versions supported by AWS ElastiCache, caching results."""

    def __init__(self, elasticache_client):
        self.elasticache_client = elasticache_client

    def get_supported_redis_versions(self):
        """Return the list of supported Redis engine versions.

        Serves the answer from the local cache when it is still fresh;
        otherwise queries ElastiCache and refreshes the cache before returning.
        """
        cache = self.__get_cache_provider()

        if not cache.cache_refresh_required("redis"):
            return cache.read_supported_versions_from_cache("redis")

        response = self.elasticache_client.describe_cache_engine_versions(Engine="redis")

        versions = [entry["EngineVersion"] for entry in response["CacheEngineVersions"]]

        cache.update_cache("redis", versions)

        return versions

    # TODO - the cache provider is instantiated here rather than via dependency
    # injection since it will likely only be used in get_supported_redis_versions.
    # If another method is added which needs a CacheProvider, it should be
    # injected into the constructor instead.
    @staticmethod
    def __get_cache_provider():
        return CacheProvider()
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
|
|
3
|
+
from dbt_platform_helper.providers.aws import AWSException
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
@dataclass
class Vpc:
    """Value object holding the subnet IDs and security group IDs for a VPC.

    As populated by VpcProvider.get_vpc_info_by_name, `subnets` holds private
    subnet IDs and `security_groups` holds the copilot environment's security
    group IDs.
    """

    subnets: list[str]
    security_groups: list[str]
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class VpcProvider:
    """Looks up VPC networking details (private subnets, security groups) via EC2."""

    def __init__(self, session):
        self.ec2_client = session.client("ec2")
        self.ec2_resource = session.resource("ec2")

    def get_vpc_info_by_name(self, app: str, env: str, vpc_name: str) -> Vpc:
        """Return a Vpc with the private subnet IDs and the security group IDs
        tagged for the given copilot app/environment.

        Raises:
            AWSException: no VPC matches the name, the VPC has no id, no
                private subnets are found, or no matching security groups exist.
        """
        described = self.ec2_client.describe_vpcs(
            Filters=[{"Name": "tag:Name", "Values": [vpc_name]}]
        )

        if not described.get("Vpcs", []):
            raise AWSException(f"VPC not found for name '{vpc_name}'")

        vpc_id = described["Vpcs"][0].get("VpcId")

        if not vpc_id:
            raise AWSException(f"VPC id not present in vpc '{vpc_name}'")

        vpc = self.ec2_resource.Vpc(vpc_id)

        tables = self.ec2_client.describe_route_tables(
            Filters=[{"Name": "vpc-id", "Values": [vpc_id]}]
        )["RouteTables"]

        # A subnet counts as private when its route table routes via a NAT gateway.
        subnet_ids = [
            association["SubnetId"]
            for table in tables
            if any("NatGatewayId" in route for route in table["Routes"])
            for association in table["Associations"]
            if "SubnetId" in association
        ]

        if not subnet_ids:
            raise AWSException(f"No private subnets found in vpc '{vpc_name}'")

        wanted_tag = {"Key": "Name", "Value": f"copilot-{app}-{env}-env"}
        security_group_ids = [
            group.id
            for group in vpc.security_groups.all()
            if group.tags and wanted_tag in group.tags
        ]

        if not security_group_ids:
            raise AWSException(f"No matching security groups found in vpc '{vpc_name}'")

        return Vpc(security_groups=security_group_ids, subnets=subnet_ids)
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
import yaml
|
|
4
|
+
from yaml.parser import ParserError
|
|
5
|
+
from yamllint import linter
|
|
6
|
+
from yamllint.config import YamlLintConfig
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class FileProviderException(Exception):
    """Base class for all errors raised by file providers."""

    pass
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class YamlFileProviderException(FileProviderException):
    """Base class for errors raised while handling YAML files."""

    pass
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class FileNotFoundException(FileProviderException):
    """Raised when the requested file does not exist."""

    pass
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class InvalidYamlException(YamlFileProviderException):
    """Raised when a file's contents cannot be parsed as YAML."""

    pass
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class DuplicateKeysException(YamlFileProviderException):
    """Raised when a YAML file contains duplicate mapping keys."""

    pass
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class YamlFileProvider:
    """Loads and writes YAML files, guarding against missing files, invalid YAML
    and duplicate mapping keys."""

    # `load` and `write` were defined without `self` and without @staticmethod,
    # which only worked when accessed via the class; calling them on an
    # instance would pass the instance as `path`. @staticmethod makes both
    # call styles work.
    @staticmethod
    def load(path: str) -> dict:
        """Read and parse the YAML file at `path`.

        Returns an empty dict when the file is empty or parses to a falsy value.

        Raises:
            FileNotFoundException: file is not there
            InvalidYamlException: file contains invalid yaml
            DuplicateKeysException: yaml contains duplicate keys
        """
        if not Path(path).exists():
            raise FileNotFoundException(f"`{path}` is missing.")
        try:
            yaml_content = yaml.safe_load(Path(path).read_text())
        # yaml.YAMLError covers ScannerError as well as ParserError, so any
        # malformed YAML is consistently reported as InvalidYamlException.
        except yaml.YAMLError:
            raise InvalidYamlException(f"{path} is not valid YAML.")

        if not yaml_content:
            return {}

        YamlFileProvider.lint_yaml_for_duplicate_keys(path)

        return yaml_content

    @staticmethod
    def write(path: str, contents: dict, comment: str = ""):
        """Serialise `contents` as YAML to `path`, prefixed by `comment`.

        `comment` is written verbatim before the YAML document; callers are
        responsible for any `#` prefixes and trailing newline.
        """
        with open(path, "w") as file:
            file.write(comment)
            yaml.dump(contents, file)

    @staticmethod
    def lint_yaml_for_duplicate_keys(path):
        """Raise DuplicateKeysException if the file at `path` has duplicate mapping keys."""
        with open(path, "r") as yaml_file:
            file_contents = yaml_file.read()
        results = linter.run(
            file_contents, YamlLintConfig(yaml.dump({"rules": {"key-duplicates": "enable"}}))
        )
        duplicate_keys = [
            "\t"
            + f"Line {result.line}: {result.message}".replace(
                " in mapping (key-duplicates)", ""
            )
            for result in results
        ]
        if duplicate_keys:
            raise DuplicateKeysException(",".join(duplicate_keys))
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
# {% extra_header %}
|
|
2
|
+
# {% version_info %}
|
|
3
|
+
|
|
4
|
+
Metadata:
|
|
5
|
+
cfn-lint:
|
|
6
|
+
config:
|
|
7
|
+
ignore_checks:
|
|
8
|
+
- W2001 # Parameter not used
|
|
9
|
+
|
|
10
|
+
Parameters:
|
|
11
|
+
# Copilot required Parameters...
|
|
12
|
+
App:
|
|
13
|
+
Type: String
|
|
14
|
+
Description: Your application's name.
|
|
15
|
+
Env:
|
|
16
|
+
Type: String
|
|
17
|
+
Description: The environment name your service, job, or workflow is being deployed to.
|
|
18
|
+
Name:
|
|
19
|
+
Type: String
|
|
20
|
+
Description: The name of the service, job, or workflow being deployed.
|
|
21
|
+
|
|
22
|
+
Resources: {% for resource in resources %}
|
|
23
|
+
{{ resource.app_prefix }}XEnvAccessPolicy:
|
|
24
|
+
Metadata:
|
|
25
|
+
'aws:copilot:description': 'An IAM ManagedPolicy for your service to access the bucket'
|
|
26
|
+
Type: AWS::IAM::ManagedPolicy
|
|
27
|
+
Properties:
|
|
28
|
+
Description: Grants Read access to the S3 bucket.
|
|
29
|
+
PolicyDocument:
|
|
30
|
+
Version: 2012-10-17
|
|
31
|
+
Statement:
|
|
32
|
+
- Sid: 'KMSDecryptAndGenerate'
|
|
33
|
+
Effect: Allow
|
|
34
|
+
Action:
|
|
35
|
+
- kms:Decrypt
|
|
36
|
+
- kms:GenerateDataKey
|
|
37
|
+
Resource: 'arn:aws:kms:eu-west-2:{{ resource.bucket_account }}:key/*'
|
|
38
|
+
Condition:
|
|
39
|
+
StringEquals:
|
|
40
|
+
aws:PrincipalTag/copilot-environment:
|
|
41
|
+
- "{{ resource.access_env }}"
|
|
42
|
+
- Sid: 'S3ObjectActions'
|
|
43
|
+
Effect: Allow
|
|
44
|
+
Action:
|
|
45
|
+
{% if resource.read %}- s3:Get*{% endif %}
|
|
46
|
+
{% if resource.write %}- s3:Put*{% endif %}
|
|
47
|
+
Resource: 'arn:aws:s3:::{{ resource.bucket_name }}/*'
|
|
48
|
+
Condition:
|
|
49
|
+
StringEquals:
|
|
50
|
+
aws:PrincipalTag/copilot-environment:
|
|
51
|
+
- "{{ resource.access_env }}"
|
|
52
|
+
- Sid: 'S3ListAction'
|
|
53
|
+
Effect: Allow
|
|
54
|
+
Action:
|
|
55
|
+
- s3:ListBucket
|
|
56
|
+
Resource: 'arn:aws:s3:::{{ resource.bucket_name }}'
|
|
57
|
+
Condition:
|
|
58
|
+
StringEquals:
|
|
59
|
+
aws:PrincipalTag/copilot-environment:
|
|
60
|
+
- "{{ resource.access_env }}"
|
|
61
|
+
{% endfor %}
|
|
62
|
+
Outputs:{% for resource in resources %}
|
|
63
|
+
{{ resource.app_prefix }}XEnvAccessPolicy:
|
|
64
|
+
Description: "The IAM::ManagedPolicy to attach to the task role"
|
|
65
|
+
Value:
|
|
66
|
+
Ref: {{ resource.app_prefix }}XEnvAccessPolicy
|
|
67
|
+
{% endfor %}
|
|
@@ -1,31 +1,28 @@
|
|
|
1
1
|
import json
|
|
2
2
|
import os
|
|
3
3
|
import re
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from dataclasses import field
|
|
4
6
|
from pathlib import Path
|
|
5
7
|
from typing import Dict
|
|
6
8
|
|
|
7
9
|
import boto3
|
|
8
|
-
import yaml
|
|
9
|
-
from boto3 import Session
|
|
10
|
-
from yaml.parser import ParserError
|
|
11
10
|
|
|
11
|
+
from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
|
|
12
12
|
from dbt_platform_helper.platform_exception import PlatformException
|
|
13
13
|
from dbt_platform_helper.utils.aws import get_aws_session_or_abort
|
|
14
14
|
from dbt_platform_helper.utils.aws import get_profile_name_from_account_id
|
|
15
15
|
from dbt_platform_helper.utils.aws import get_ssm_secrets
|
|
16
16
|
from dbt_platform_helper.utils.messages import abort_with_error
|
|
17
|
+
from dbt_platform_helper.utils.platform_config import load_unvalidated_config_file
|
|
17
18
|
|
|
18
19
|
|
|
20
|
+
@dataclass
|
|
19
21
|
class Environment:
|
|
20
22
|
name: str
|
|
21
23
|
account_id: str
|
|
22
24
|
sessions: Dict[str, boto3.Session]
|
|
23
25
|
|
|
24
|
-
def __init__(self, name: str, account_id: str, sessions: Dict[str, boto3.Session]):
|
|
25
|
-
self.name = name
|
|
26
|
-
self.account_id = account_id
|
|
27
|
-
self.sessions = sessions
|
|
28
|
-
|
|
29
26
|
@property
|
|
30
27
|
def session(self):
|
|
31
28
|
if self.account_id not in self.sessions:
|
|
@@ -36,24 +33,17 @@ class Environment:
|
|
|
36
33
|
return self.sessions[self.account_id]
|
|
37
34
|
|
|
38
35
|
|
|
36
|
+
@dataclass
|
|
39
37
|
class Service:
|
|
40
38
|
name: str
|
|
41
39
|
kind: str
|
|
42
40
|
|
|
43
|
-
def __init__(self, name: str, kind: str):
|
|
44
|
-
self.name = name
|
|
45
|
-
self.kind = kind
|
|
46
|
-
|
|
47
41
|
|
|
42
|
+
@dataclass
|
|
48
43
|
class Application:
|
|
49
44
|
name: str
|
|
50
|
-
environments: Dict[str, Environment]
|
|
51
|
-
services: Dict[str, Service]
|
|
52
|
-
|
|
53
|
-
def __init__(self, name: str):
|
|
54
|
-
self.name = name
|
|
55
|
-
self.environments = {}
|
|
56
|
-
self.services = {}
|
|
45
|
+
environments: Dict[str, Environment] = field(default_factory=dict)
|
|
46
|
+
services: Dict[str, Service] = field(default_factory=dict)
|
|
57
47
|
|
|
58
48
|
def __str__(self):
|
|
59
49
|
output = f"Application {self.name} with"
|
|
@@ -69,7 +59,7 @@ class Application:
|
|
|
69
59
|
return str(self) == str(other)
|
|
70
60
|
|
|
71
61
|
|
|
72
|
-
def load_application(app
|
|
62
|
+
def load_application(app=None, default_session=None) -> Application:
|
|
73
63
|
application = Application(app if app else get_application_name())
|
|
74
64
|
current_session = default_session if default_session else get_aws_session_or_abort()
|
|
75
65
|
|
|
@@ -81,7 +71,7 @@ def load_application(app: str = None, default_session: Session = None) -> Applic
|
|
|
81
71
|
WithDecryption=False,
|
|
82
72
|
)
|
|
83
73
|
except ssm_client.exceptions.ParameterNotFound:
|
|
84
|
-
raise ApplicationNotFoundException(
|
|
74
|
+
raise ApplicationNotFoundException(application.name)
|
|
85
75
|
|
|
86
76
|
path = f"/copilot/applications/{application.name}/environments"
|
|
87
77
|
secrets = get_ssm_secrets(app, None, current_session, path)
|
|
@@ -115,27 +105,35 @@ def load_application(app: str = None, default_session: Session = None) -> Applic
|
|
|
115
105
|
Recursive=False,
|
|
116
106
|
WithDecryption=False,
|
|
117
107
|
)
|
|
108
|
+
results = response["Parameters"]
|
|
109
|
+
while "NextToken" in response:
|
|
110
|
+
response = ssm_client.get_parameters_by_path(
|
|
111
|
+
Path=f"/copilot/applications/{application.name}/components",
|
|
112
|
+
Recursive=False,
|
|
113
|
+
WithDecryption=False,
|
|
114
|
+
NextToken=response["NextToken"],
|
|
115
|
+
)
|
|
116
|
+
results.extend(response["Parameters"])
|
|
118
117
|
|
|
119
118
|
application.services = {
|
|
120
119
|
svc["name"]: Service(svc["name"], svc["type"])
|
|
121
|
-
for svc in [json.loads(parameter["Value"]) for parameter in
|
|
120
|
+
for svc in [json.loads(parameter["Value"]) for parameter in results]
|
|
122
121
|
}
|
|
123
122
|
|
|
124
123
|
return application
|
|
125
124
|
|
|
126
125
|
|
|
127
|
-
def get_application_name():
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
return app_name
|
|
126
|
+
def get_application_name(abort=abort_with_error):
    """Return the value of the 'application' key from the platform config file.

    `abort` (default: abort_with_error) is called with an explanatory message
    when the config file is missing or contains no 'application' key; whether
    it raises or exits is up to the injected callable.
    """
    if Path(PLATFORM_CONFIG_FILE).exists():
        try:
            app_config = load_unvalidated_config_file()
            return app_config["application"]
        except KeyError:
            abort(
                f"Cannot get application name. No 'application' key can be found in {PLATFORM_CONFIG_FILE}"
            )
    else:
        abort(f"Cannot get application name. {PLATFORM_CONFIG_FILE} is missing.")
|
|
139
137
|
|
|
140
138
|
|
|
141
139
|
class ApplicationException(PlatformException):
|
dbt_platform_helper/utils/aws.py
CHANGED
|
@@ -13,12 +13,11 @@ import click
|
|
|
13
13
|
import yaml
|
|
14
14
|
from boto3 import Session
|
|
15
15
|
|
|
16
|
+
from dbt_platform_helper.constants import REFRESH_TOKEN_MESSAGE
|
|
16
17
|
from dbt_platform_helper.platform_exception import PlatformException
|
|
17
|
-
from dbt_platform_helper.providers.aws import AWSException
|
|
18
18
|
from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException
|
|
19
19
|
from dbt_platform_helper.providers.aws import ImageNotFoundException
|
|
20
20
|
from dbt_platform_helper.providers.aws import LogGroupNotFoundException
|
|
21
|
-
from dbt_platform_helper.providers.cache import CacheProvider
|
|
22
21
|
from dbt_platform_helper.providers.validation import ValidationException
|
|
23
22
|
|
|
24
23
|
SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
|
|
@@ -27,9 +26,6 @@ AWS_SESSION_CACHE = {}
|
|
|
27
26
|
|
|
28
27
|
|
|
29
28
|
def get_aws_session_or_abort(aws_profile: str = None) -> boto3.session.Session:
|
|
30
|
-
REFRESH_TOKEN_MESSAGE = (
|
|
31
|
-
"To refresh this SSO session run `aws sso login` with the corresponding profile"
|
|
32
|
-
)
|
|
33
29
|
aws_profile = aws_profile or os.getenv("AWS_PROFILE")
|
|
34
30
|
if aws_profile in AWS_SESSION_CACHE:
|
|
35
31
|
return AWS_SESSION_CACHE[aws_profile]
|
|
@@ -358,59 +354,6 @@ def get_postgres_connection_data_updated_with_master_secret(session, parameter_n
|
|
|
358
354
|
return parameter_data
|
|
359
355
|
|
|
360
356
|
|
|
361
|
-
def get_supported_redis_versions():
|
|
362
|
-
|
|
363
|
-
cache_provider = CacheProvider()
|
|
364
|
-
|
|
365
|
-
if cache_provider.cache_refresh_required("redis"):
|
|
366
|
-
|
|
367
|
-
session = get_aws_session_or_abort()
|
|
368
|
-
elasticache_client = session.client("elasticache")
|
|
369
|
-
|
|
370
|
-
supported_versions_response = elasticache_client.describe_cache_engine_versions(
|
|
371
|
-
Engine="redis"
|
|
372
|
-
)
|
|
373
|
-
|
|
374
|
-
supported_versions = [
|
|
375
|
-
version["EngineVersion"]
|
|
376
|
-
for version in supported_versions_response["CacheEngineVersions"]
|
|
377
|
-
]
|
|
378
|
-
|
|
379
|
-
cache_provider.update_cache("redis", supported_versions)
|
|
380
|
-
|
|
381
|
-
return supported_versions
|
|
382
|
-
|
|
383
|
-
else:
|
|
384
|
-
return cache_provider.read_supported_versions_from_cache("redis")
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
def get_supported_opensearch_versions():
|
|
388
|
-
|
|
389
|
-
cache_provider = CacheProvider()
|
|
390
|
-
|
|
391
|
-
if cache_provider.cache_refresh_required("opensearch"):
|
|
392
|
-
|
|
393
|
-
session = get_aws_session_or_abort()
|
|
394
|
-
opensearch_client = session.client("opensearch")
|
|
395
|
-
|
|
396
|
-
response = opensearch_client.list_versions()
|
|
397
|
-
all_versions = response["Versions"]
|
|
398
|
-
|
|
399
|
-
opensearch_versions = [
|
|
400
|
-
version for version in all_versions if not version.startswith("Elasticsearch_")
|
|
401
|
-
]
|
|
402
|
-
supported_versions = [
|
|
403
|
-
version.removeprefix("OpenSearch_") for version in opensearch_versions
|
|
404
|
-
]
|
|
405
|
-
|
|
406
|
-
cache_provider.update_cache("opensearch", supported_versions)
|
|
407
|
-
|
|
408
|
-
return supported_versions
|
|
409
|
-
|
|
410
|
-
else:
|
|
411
|
-
return cache_provider.read_supported_versions_from_cache("opensearch")
|
|
412
|
-
|
|
413
|
-
|
|
414
357
|
def get_connection_string(
|
|
415
358
|
session: Session,
|
|
416
359
|
app: str,
|
|
@@ -433,55 +376,6 @@ def get_connection_string(
|
|
|
433
376
|
return f"postgres://{conn['username']}:{conn['password']}@{conn['host']}:{conn['port']}/{conn['dbname']}"
|
|
434
377
|
|
|
435
378
|
|
|
436
|
-
class Vpc:
|
|
437
|
-
def __init__(self, subnets: list[str], security_groups: list[str]):
|
|
438
|
-
self.subnets = subnets
|
|
439
|
-
self.security_groups = security_groups
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
def get_vpc_info_by_name(session: Session, app: str, env: str, vpc_name: str) -> Vpc:
|
|
443
|
-
ec2_client = session.client("ec2")
|
|
444
|
-
vpc_response = ec2_client.describe_vpcs(Filters=[{"Name": "tag:Name", "Values": [vpc_name]}])
|
|
445
|
-
|
|
446
|
-
matching_vpcs = vpc_response.get("Vpcs", [])
|
|
447
|
-
|
|
448
|
-
if not matching_vpcs:
|
|
449
|
-
raise AWSException(f"VPC not found for name '{vpc_name}'")
|
|
450
|
-
|
|
451
|
-
vpc_id = vpc_response["Vpcs"][0].get("VpcId")
|
|
452
|
-
|
|
453
|
-
if not vpc_id:
|
|
454
|
-
raise AWSException(f"VPC id not present in vpc '{vpc_name}'")
|
|
455
|
-
|
|
456
|
-
ec2_resource = session.resource("ec2")
|
|
457
|
-
vpc = ec2_resource.Vpc(vpc_id)
|
|
458
|
-
|
|
459
|
-
route_tables = ec2_client.describe_route_tables(
|
|
460
|
-
Filters=[{"Name": "vpc-id", "Values": [vpc_id]}]
|
|
461
|
-
)["RouteTables"]
|
|
462
|
-
|
|
463
|
-
subnets = []
|
|
464
|
-
for route_table in route_tables:
|
|
465
|
-
private_routes = [route for route in route_table["Routes"] if "NatGatewayId" in route]
|
|
466
|
-
if not private_routes:
|
|
467
|
-
continue
|
|
468
|
-
for association in route_table["Associations"]:
|
|
469
|
-
if "SubnetId" in association:
|
|
470
|
-
subnet_id = association["SubnetId"]
|
|
471
|
-
subnets.append(subnet_id)
|
|
472
|
-
|
|
473
|
-
if not subnets:
|
|
474
|
-
raise AWSException(f"No private subnets found in vpc '{vpc_name}'")
|
|
475
|
-
|
|
476
|
-
tag_value = {"Key": "Name", "Value": f"copilot-{app}-{env}-env"}
|
|
477
|
-
sec_groups = [sg.id for sg in vpc.security_groups.all() if sg.tags and tag_value in sg.tags]
|
|
478
|
-
|
|
479
|
-
if not sec_groups:
|
|
480
|
-
raise AWSException(f"No matching security groups found in vpc '{vpc_name}'")
|
|
481
|
-
|
|
482
|
-
return Vpc(subnets, sec_groups)
|
|
483
|
-
|
|
484
|
-
|
|
485
379
|
def start_build_extraction(codebuild_client, build_options):
|
|
486
380
|
response = codebuild_client.start_build(**build_options)
|
|
487
381
|
return response["build"]["arn"]
|
|
@@ -1,35 +1,17 @@
|
|
|
1
|
-
from copy import deepcopy
|
|
2
|
-
from os import makedirs
|
|
3
|
-
from pathlib import Path
|
|
4
|
-
|
|
5
1
|
import click
|
|
6
2
|
import yaml
|
|
7
3
|
from jinja2 import Environment
|
|
8
4
|
from jinja2 import FileSystemLoader
|
|
9
5
|
|
|
6
|
+
from dbt_platform_helper.providers.files import FileProvider
|
|
7
|
+
|
|
10
8
|
|
|
11
9
|
def to_yaml(value):
|
|
12
10
|
return yaml.dump(value, sort_keys=False)
|
|
13
11
|
|
|
14
12
|
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
file = Path(base_path).joinpath(file_path)
|
|
18
|
-
file_exists = file.exists()
|
|
19
|
-
|
|
20
|
-
if not file_path.parent.exists():
|
|
21
|
-
makedirs(file_path.parent)
|
|
22
|
-
|
|
23
|
-
if file_exists and not overwrite:
|
|
24
|
-
return f"File {file_path} exists; doing nothing"
|
|
25
|
-
|
|
26
|
-
action = "overwritten" if file_exists and overwrite else "created"
|
|
27
|
-
|
|
28
|
-
file.write_text(contents)
|
|
29
|
-
|
|
30
|
-
return f"File {file_path} {action}"
|
|
31
|
-
|
|
32
|
-
|
|
13
|
+
# TODO - extract file provider functionality from this - and figure out what it actually does!
|
|
14
|
+
# Move to the new file provider - or potentially copilot?
|
|
33
15
|
def generate_override_files(base_path, file_path, output_dir):
|
|
34
16
|
def generate_files_for_dir(pattern):
|
|
35
17
|
for file in file_path.glob(pattern):
|
|
@@ -37,7 +19,7 @@ def generate_override_files(base_path, file_path, output_dir):
|
|
|
37
19
|
contents = file.read_text()
|
|
38
20
|
file_name = str(file).removeprefix(f"{file_path}/")
|
|
39
21
|
click.echo(
|
|
40
|
-
mkfile(
|
|
22
|
+
FileProvider.mkfile(
|
|
41
23
|
base_path,
|
|
42
24
|
output_dir / file_name,
|
|
43
25
|
contents,
|
|
@@ -62,43 +44,10 @@ def generate_override_files_from_template(base_path, overrides_path, output_dir,
|
|
|
62
44
|
if file.is_file():
|
|
63
45
|
file_name = str(file).removeprefix(f"{overrides_path}/")
|
|
64
46
|
contents = templates.get_template(str(file_name)).render(data)
|
|
65
|
-
message = mkfile(
|
|
47
|
+
message = FileProvider.mkfile(
|
|
48
|
+
base_path, output_dir / file_name, contents, overwrite=True
|
|
49
|
+
)
|
|
66
50
|
click.echo(message)
|
|
67
51
|
|
|
68
52
|
generate_files_for_dir("*")
|
|
69
53
|
generate_files_for_dir("bin/*")
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
def apply_environment_defaults(config):
|
|
73
|
-
if "environments" not in config:
|
|
74
|
-
return config
|
|
75
|
-
|
|
76
|
-
enriched_config = deepcopy(config)
|
|
77
|
-
|
|
78
|
-
environments = enriched_config["environments"]
|
|
79
|
-
env_defaults = environments.get("*", {})
|
|
80
|
-
without_defaults_entry = {
|
|
81
|
-
name: data if data else {} for name, data in environments.items() if name != "*"
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
default_versions = config.get("default_versions", {})
|
|
85
|
-
|
|
86
|
-
def combine_env_data(data):
|
|
87
|
-
return {
|
|
88
|
-
**env_defaults,
|
|
89
|
-
**data,
|
|
90
|
-
"versions": {
|
|
91
|
-
**default_versions,
|
|
92
|
-
**env_defaults.get("versions", {}),
|
|
93
|
-
**data.get("versions", {}),
|
|
94
|
-
},
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
defaulted_envs = {
|
|
98
|
-
env_name: combine_env_data(env_data)
|
|
99
|
-
for env_name, env_data in without_defaults_entry.items()
|
|
100
|
-
}
|
|
101
|
-
|
|
102
|
-
enriched_config["environments"] = defaulted_envs
|
|
103
|
-
|
|
104
|
-
return enriched_config
|
|
@@ -18,10 +18,3 @@ def load_unvalidated_config_file():
|
|
|
18
18
|
return yaml.safe_load(file_contents)
|
|
19
19
|
except yaml.parser.ParserError:
|
|
20
20
|
return {}
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
def get_environment_pipeline_names():
|
|
24
|
-
pipelines_config = load_unvalidated_config_file().get("environment_pipelines")
|
|
25
|
-
if pipelines_config:
|
|
26
|
-
return pipelines_config.keys()
|
|
27
|
-
return {}
|
|
@@ -5,6 +5,16 @@ import jinja2
|
|
|
5
5
|
from dbt_platform_helper.jinja2_tags import ExtraHeaderTag
|
|
6
6
|
from dbt_platform_helper.jinja2_tags import VersionTag
|
|
7
7
|
|
|
8
|
+
S3_CROSS_ACCOUNT_POLICY = "addons/svc/s3-cross-account-policy.yml"
|
|
9
|
+
|
|
10
|
+
ADDON_TEMPLATE_MAP = {
|
|
11
|
+
"s3": ["addons/svc/s3-policy.yml"],
|
|
12
|
+
"s3-policy": ["addons/svc/s3-policy.yml"],
|
|
13
|
+
"appconfig-ipfilter": ["addons/svc/appconfig-ipfilter.yml"],
|
|
14
|
+
"subscription-filter": ["addons/svc/subscription-filter.yml"],
|
|
15
|
+
"prometheus-policy": ["addons/svc/prometheus-policy.yml"],
|
|
16
|
+
}
|
|
17
|
+
|
|
8
18
|
|
|
9
19
|
def camel_case(s):
|
|
10
20
|
s = re.sub(r"(_|-)+", " ", s).title().replace(" ", "")
|