dbt-platform-helper 12.4.0__py3-none-any.whl → 12.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dbt-platform-helper has been flagged as possibly problematic. See the registry's advisory page for more details.
- dbt_platform_helper/COMMANDS.md +0 -3
- dbt_platform_helper/commands/config.py +2 -2
- dbt_platform_helper/commands/copilot.py +47 -28
- dbt_platform_helper/commands/environment.py +16 -178
- dbt_platform_helper/commands/pipeline.py +5 -34
- dbt_platform_helper/constants.py +12 -1
- dbt_platform_helper/domain/config_validator.py +242 -0
- dbt_platform_helper/domain/copilot_environment.py +204 -0
- dbt_platform_helper/domain/database_copy.py +7 -5
- dbt_platform_helper/domain/maintenance_page.py +1 -1
- dbt_platform_helper/domain/terraform_environment.py +53 -0
- dbt_platform_helper/jinja2_tags.py +1 -1
- dbt_platform_helper/providers/cache.py +77 -0
- dbt_platform_helper/providers/cloudformation.py +0 -1
- dbt_platform_helper/providers/config.py +90 -0
- dbt_platform_helper/providers/opensearch.py +36 -0
- dbt_platform_helper/providers/platform_config_schema.py +667 -0
- dbt_platform_helper/providers/redis.py +34 -0
- dbt_platform_helper/providers/yaml_file.py +83 -0
- dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
- dbt_platform_helper/utils/aws.py +1 -59
- dbt_platform_helper/utils/files.py +0 -106
- dbt_platform_helper/utils/template.py +10 -0
- dbt_platform_helper/utils/validation.py +5 -889
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/METADATA +2 -2
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/RECORD +29 -22
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/WHEEL +1 -1
- dbt_platform_helper/templates/pipelines/environments/buildspec.yml +0 -80
- dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -48
- dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +0 -21
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from dbt_platform_helper.providers.cache import CacheProvider
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class RedisProvider:
    """Looks up the Redis engine versions supported by AWS ElastiCache, backed
    by a local cache so repeated calls avoid hitting the AWS API."""

    def __init__(self, elasticache_client):
        # boto3 ElastiCache client used to query supported engine versions.
        self.elasticache_client = elasticache_client

    def get_supported_redis_versions(self):
        """Return the list of Redis engine versions supported by ElastiCache.

        Serves the cached list when it is still fresh; otherwise queries AWS,
        updates the cache, and returns the fresh list.
        """
        cache_provider = self.__get_cache_provider()

        # Fresh cache: short-circuit without an AWS call.
        if not cache_provider.cache_refresh_required("redis"):
            return cache_provider.read_supported_versions_from_cache("redis")

        response = self.elasticache_client.describe_cache_engine_versions(Engine="redis")
        versions = [entry["EngineVersion"] for entry in response["CacheEngineVersions"]]

        cache_provider.update_cache("redis", versions)

        return versions

    # TODO - cache provider instantiated here rather than via dependency injection since it will likely only be used in the get_supported_redis_versions method.
    # If another method is added which needs a CacheProvider, it should be injected into the constructor instead.
    @staticmethod
    def __get_cache_provider():
        return CacheProvider()
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
from abc import ABC
|
|
2
|
+
from abc import abstractmethod
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
import yaml
|
|
6
|
+
from yaml.parser import ParserError
|
|
7
|
+
from yamllint import linter
|
|
8
|
+
from yamllint.config import YamlLintConfig
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class FileProviderException(Exception):
    """Base class for errors raised by file providers."""


class YamlFileProviderException(FileProviderException):
    """Base class for errors raised while handling YAML files."""


class FileNotFoundException(YamlFileProviderException):
    """Raised when the requested YAML file does not exist on disk."""


class InvalidYamlException(YamlFileProviderException):
    """Raised when a file's contents cannot be parsed as YAML."""


class DuplicateKeysException(YamlFileProviderException):
    """Raised when a YAML document contains duplicate mapping keys."""
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class FileProvider(ABC):
    """Abstract interface for loading a file from disk into a dictionary."""

    # Declared static: the original signature had no `self`, and the concrete
    # subclass is invoked at class level, so make the static contract explicit.
    @staticmethod
    @abstractmethod
    def load(path: str) -> dict:
        """Parse the file at ``path`` and return its contents as a dict."""
        raise NotImplementedError("Implement this in the subclass")
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class YamlFileProvider(FileProvider):
    """Loads and writes YAML files, guarding against missing files, invalid
    YAML, and duplicate mapping keys."""

    # @staticmethod added: the original had no `self`, so instance calls would
    # have failed; class-level calls are unchanged.
    @staticmethod
    def load(path: str) -> dict:
        """
        Load the YAML file at ``path`` and return its contents as a dict
        (an empty dict when the file is empty).

        Raises:
            FileNotFoundException: file is not there
            InvalidYamlException: file contains invalid yaml
            DuplicateKeysException: yaml contains duplicate keys
        """
        if not Path(path).exists():
            # TODO this error message is domain specific and should not mention deployment directory project here
            raise FileNotFoundException(
                f"`{path}` is missing. Please check it exists and you are in the root directory of your deployment project."
            )
        try:
            yaml_content = yaml.safe_load(Path(path).read_text())
        except yaml.YAMLError:
            # YAMLError is the base of ParserError, ScannerError etc., so every
            # malformed file surfaces as InvalidYamlException per the contract
            # above (the original caught only ParserError and let scanner
            # errors escape).
            raise InvalidYamlException(f"{path} is not valid YAML.")

        if not yaml_content:
            return {}

        YamlFileProvider.lint_yaml_for_duplicate_keys(path)

        return yaml_content

    @staticmethod
    def write(path: str, contents: dict, comment: str = ""):
        """Write ``contents`` to ``path`` as YAML, preceded by the optional
        ``comment`` string (written verbatim, so include any '#' markers)."""
        with open(path, "w") as file:
            file.write(comment)
            yaml.dump(contents, file)

    @staticmethod
    def lint_yaml_for_duplicate_keys(path):
        """Raise DuplicateKeysException listing every duplicated mapping key
        yamllint finds in the file at ``path``; no-op when there are none."""
        with open(path, "r") as yaml_file:
            file_contents = yaml_file.read()
        results = linter.run(
            file_contents, YamlLintConfig(yaml.dump({"rules": {"key-duplicates": "enable"}}))
        )
        # yamllint reports one problem per duplicated key; strip the rule
        # suffix to keep the error message readable.
        duplicate_keys = [
            "\t"
            + f"Line {result.line}: {result.message}".replace(
                " in mapping (key-duplicates)", ""
            )
            for result in results
        ]
        if duplicate_keys:
            raise DuplicateKeysException(",".join(duplicate_keys))
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
# {% extra_header %}
# {% version_info %}

# Jinja2-templated CloudFormation addon: for each entry in `resources` it
# renders an IAM ManagedPolicy granting a service cross-account access to an
# S3 bucket (KMS decrypt/generate, conditional Get/Put, and ListBucket),
# scoped to principals tagged with the allowed copilot environment.
Metadata:
  cfn-lint:
    config:
      ignore_checks:
        - W2001 # Parameter not used

Parameters:
  # Copilot required Parameters...
  App:
    Type: String
    Description: Your application's name.
  Env:
    Type: String
    Description: The environment name your service, job, or workflow is being deployed to.
  Name:
    Type: String
    Description: The name of the service, job, or workflow being deployed.

Resources: {% for resource in resources %}
  {{ resource.app_prefix }}XEnvAccessPolicy:
    Metadata:
      'aws:copilot:description': 'An IAM ManagedPolicy for your service to access the bucket'
    Type: AWS::IAM::ManagedPolicy
    Properties:
      # NOTE(review): description says "Read access" but `resource.write`
      # conditionally grants s3:Put* below — confirm the wording is intended.
      Description: Grants Read access to the S3 bucket.
      PolicyDocument:
        Version: 2012-10-17
        Statement:
          - Sid: 'KMSDecryptAndGenerate'
            Effect: Allow
            Action:
              - kms:Decrypt
              - kms:GenerateDataKey
            Resource: 'arn:aws:kms:eu-west-2:{{ resource.bucket_account }}:key/*'
            Condition:
              StringEquals:
                aws:PrincipalTag/copilot-environment:
                  - "{{ resource.access_env }}"
          - Sid: 'S3ObjectActions'
            Effect: Allow
            Action:
              {% if resource.read %}- s3:Get*{% endif %}
              {% if resource.write %}- s3:Put*{% endif %}
            Resource: 'arn:aws:s3:::{{ resource.bucket_name }}/*'
            Condition:
              StringEquals:
                aws:PrincipalTag/copilot-environment:
                  - "{{ resource.access_env }}"
          - Sid: 'S3ListAction'
            Effect: Allow
            Action:
              - s3:ListBucket
            Resource: 'arn:aws:s3:::{{ resource.bucket_name }}'
            Condition:
              StringEquals:
                aws:PrincipalTag/copilot-environment:
                  - "{{ resource.access_env }}"
{% endfor %}
Outputs: {% for resource in resources %}
  {{ resource.app_prefix }}XEnvAccessPolicy:
    Description: "The IAM::ManagedPolicy to attach to the task role"
    Value:
      Ref: {{ resource.app_prefix }}XEnvAccessPolicy
{% endfor %}
|
dbt_platform_helper/utils/aws.py
CHANGED
|
@@ -13,15 +13,13 @@ import click
|
|
|
13
13
|
import yaml
|
|
14
14
|
from boto3 import Session
|
|
15
15
|
|
|
16
|
+
from dbt_platform_helper.constants import REFRESH_TOKEN_MESSAGE
|
|
16
17
|
from dbt_platform_helper.platform_exception import PlatformException
|
|
17
18
|
from dbt_platform_helper.providers.aws import AWSException
|
|
18
19
|
from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException
|
|
19
20
|
from dbt_platform_helper.providers.aws import ImageNotFoundException
|
|
20
21
|
from dbt_platform_helper.providers.aws import LogGroupNotFoundException
|
|
21
22
|
from dbt_platform_helper.providers.validation import ValidationException
|
|
22
|
-
from dbt_platform_helper.utils.files import cache_refresh_required
|
|
23
|
-
from dbt_platform_helper.utils.files import read_supported_versions_from_cache
|
|
24
|
-
from dbt_platform_helper.utils.files import write_to_cache
|
|
25
23
|
|
|
26
24
|
SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
|
|
27
25
|
SSM_PATH = "/copilot/{app}/{env}/secrets/{name}"
|
|
@@ -29,9 +27,6 @@ AWS_SESSION_CACHE = {}
|
|
|
29
27
|
|
|
30
28
|
|
|
31
29
|
def get_aws_session_or_abort(aws_profile: str = None) -> boto3.session.Session:
|
|
32
|
-
REFRESH_TOKEN_MESSAGE = (
|
|
33
|
-
"To refresh this SSO session run `aws sso login` with the corresponding profile"
|
|
34
|
-
)
|
|
35
30
|
aws_profile = aws_profile or os.getenv("AWS_PROFILE")
|
|
36
31
|
if aws_profile in AWS_SESSION_CACHE:
|
|
37
32
|
return AWS_SESSION_CACHE[aws_profile]
|
|
@@ -360,59 +355,6 @@ def get_postgres_connection_data_updated_with_master_secret(session, parameter_n
|
|
|
360
355
|
return parameter_data
|
|
361
356
|
|
|
362
357
|
|
|
363
|
-
def get_supported_redis_versions():
|
|
364
|
-
|
|
365
|
-
if cache_refresh_required("redis"):
|
|
366
|
-
|
|
367
|
-
supported_versions = []
|
|
368
|
-
|
|
369
|
-
session = get_aws_session_or_abort()
|
|
370
|
-
elasticache_client = session.client("elasticache")
|
|
371
|
-
|
|
372
|
-
supported_versions_response = elasticache_client.describe_cache_engine_versions(
|
|
373
|
-
Engine="redis"
|
|
374
|
-
)
|
|
375
|
-
|
|
376
|
-
supported_versions = [
|
|
377
|
-
version["EngineVersion"]
|
|
378
|
-
for version in supported_versions_response["CacheEngineVersions"]
|
|
379
|
-
]
|
|
380
|
-
|
|
381
|
-
write_to_cache("redis", supported_versions)
|
|
382
|
-
|
|
383
|
-
return supported_versions
|
|
384
|
-
|
|
385
|
-
else:
|
|
386
|
-
return read_supported_versions_from_cache("redis")
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
def get_supported_opensearch_versions():
|
|
390
|
-
|
|
391
|
-
if cache_refresh_required("opensearch"):
|
|
392
|
-
|
|
393
|
-
supported_versions = []
|
|
394
|
-
|
|
395
|
-
session = get_aws_session_or_abort()
|
|
396
|
-
opensearch_client = session.client("opensearch")
|
|
397
|
-
|
|
398
|
-
response = opensearch_client.list_versions()
|
|
399
|
-
all_versions = response["Versions"]
|
|
400
|
-
|
|
401
|
-
opensearch_versions = [
|
|
402
|
-
version for version in all_versions if not version.startswith("Elasticsearch_")
|
|
403
|
-
]
|
|
404
|
-
supported_versions = [
|
|
405
|
-
version.removeprefix("OpenSearch_") for version in opensearch_versions
|
|
406
|
-
]
|
|
407
|
-
|
|
408
|
-
write_to_cache("opensearch", supported_versions)
|
|
409
|
-
|
|
410
|
-
return supported_versions
|
|
411
|
-
|
|
412
|
-
else:
|
|
413
|
-
return read_supported_versions_from_cache("opensearch")
|
|
414
|
-
|
|
415
|
-
|
|
416
358
|
def get_connection_string(
|
|
417
359
|
session: Session,
|
|
418
360
|
app: str,
|
|
@@ -1,6 +1,3 @@
|
|
|
1
|
-
import os
|
|
2
|
-
from copy import deepcopy
|
|
3
|
-
from datetime import datetime
|
|
4
1
|
from os import makedirs
|
|
5
2
|
from pathlib import Path
|
|
6
3
|
|
|
@@ -9,8 +6,6 @@ import yaml
|
|
|
9
6
|
from jinja2 import Environment
|
|
10
7
|
from jinja2 import FileSystemLoader
|
|
11
8
|
|
|
12
|
-
from dbt_platform_helper.constants import PLATFORM_HELPER_CACHE_FILE
|
|
13
|
-
|
|
14
9
|
|
|
15
10
|
def to_yaml(value):
|
|
16
11
|
return yaml.dump(value, sort_keys=False)
|
|
@@ -71,104 +66,3 @@ def generate_override_files_from_template(base_path, overrides_path, output_dir,
|
|
|
71
66
|
|
|
72
67
|
generate_files_for_dir("*")
|
|
73
68
|
generate_files_for_dir("bin/*")
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
def apply_environment_defaults(config):
|
|
77
|
-
if "environments" not in config:
|
|
78
|
-
return config
|
|
79
|
-
|
|
80
|
-
enriched_config = deepcopy(config)
|
|
81
|
-
|
|
82
|
-
environments = enriched_config["environments"]
|
|
83
|
-
env_defaults = environments.get("*", {})
|
|
84
|
-
without_defaults_entry = {
|
|
85
|
-
name: data if data else {} for name, data in environments.items() if name != "*"
|
|
86
|
-
}
|
|
87
|
-
|
|
88
|
-
default_versions = config.get("default_versions", {})
|
|
89
|
-
|
|
90
|
-
def combine_env_data(data):
|
|
91
|
-
return {
|
|
92
|
-
**env_defaults,
|
|
93
|
-
**data,
|
|
94
|
-
"versions": {
|
|
95
|
-
**default_versions,
|
|
96
|
-
**env_defaults.get("versions", {}),
|
|
97
|
-
**data.get("versions", {}),
|
|
98
|
-
},
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
defaulted_envs = {
|
|
102
|
-
env_name: combine_env_data(env_data)
|
|
103
|
-
for env_name, env_data in without_defaults_entry.items()
|
|
104
|
-
}
|
|
105
|
-
|
|
106
|
-
enriched_config["environments"] = defaulted_envs
|
|
107
|
-
|
|
108
|
-
return enriched_config
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
def read_supported_versions_from_cache(resource_name):
|
|
112
|
-
|
|
113
|
-
platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
|
|
114
|
-
|
|
115
|
-
return platform_helper_config.get(resource_name).get("versions")
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
def write_to_cache(resource_name, supported_versions):
|
|
119
|
-
|
|
120
|
-
platform_helper_config = {}
|
|
121
|
-
|
|
122
|
-
if os.path.exists(PLATFORM_HELPER_CACHE_FILE):
|
|
123
|
-
platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
|
|
124
|
-
|
|
125
|
-
cache_dict = {
|
|
126
|
-
resource_name: {
|
|
127
|
-
"versions": supported_versions,
|
|
128
|
-
"date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
|
|
129
|
-
}
|
|
130
|
-
}
|
|
131
|
-
|
|
132
|
-
platform_helper_config.update(cache_dict)
|
|
133
|
-
|
|
134
|
-
with open(PLATFORM_HELPER_CACHE_FILE, "w") as file:
|
|
135
|
-
file.write("# [!] This file is autogenerated via the platform-helper. Do not edit.\n")
|
|
136
|
-
yaml.dump(platform_helper_config, file)
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
def cache_refresh_required(resource_name) -> bool:
|
|
140
|
-
"""
|
|
141
|
-
Checks if the platform-helper should reach out to AWS to 'refresh' its
|
|
142
|
-
cached values.
|
|
143
|
-
|
|
144
|
-
An API call is needed if any of the following conditions are met:
|
|
145
|
-
1. No cache file (.platform-helper-config.yml) exists.
|
|
146
|
-
2. The resource name (e.g. redis, opensearch) does not exist within the cache file.
|
|
147
|
-
3. The date-retrieved value of the cached data is > than a time interval. In this case 1 day.
|
|
148
|
-
"""
|
|
149
|
-
|
|
150
|
-
if not os.path.exists(PLATFORM_HELPER_CACHE_FILE):
|
|
151
|
-
return True
|
|
152
|
-
|
|
153
|
-
platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
|
|
154
|
-
|
|
155
|
-
if platform_helper_config.get(resource_name):
|
|
156
|
-
return check_if_cached_datetime_is_greater_than_interval(
|
|
157
|
-
platform_helper_config[resource_name].get("date-retrieved"), 1
|
|
158
|
-
)
|
|
159
|
-
|
|
160
|
-
return True
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
def check_if_cached_datetime_is_greater_than_interval(date_retrieved, interval_in_days):
|
|
164
|
-
|
|
165
|
-
current_datetime = datetime.now()
|
|
166
|
-
cached_datetime = datetime.strptime(date_retrieved, "%d-%m-%y %H:%M:%S")
|
|
167
|
-
delta = current_datetime - cached_datetime
|
|
168
|
-
|
|
169
|
-
return delta.days > interval_in_days
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
def read_file_as_yaml(file_name):
|
|
173
|
-
|
|
174
|
-
return yaml.safe_load(Path(file_name).read_text())
|
|
@@ -5,6 +5,16 @@ import jinja2
|
|
|
5
5
|
from dbt_platform_helper.jinja2_tags import ExtraHeaderTag
|
|
6
6
|
from dbt_platform_helper.jinja2_tags import VersionTag
|
|
7
7
|
|
|
8
|
+
# Template used for cross-account S3 bucket access policies.
S3_CROSS_ACCOUNT_POLICY = "addons/svc/s3-cross-account-policy.yml"

# Presumably maps an addon type to the service-level addon template(s) rendered
# for it — TODO confirm against the callers in commands/copilot.py.
# Note "s3" and "s3-policy" deliberately share the same template.
ADDON_TEMPLATE_MAP = {
    "s3": ["addons/svc/s3-policy.yml"],
    "s3-policy": ["addons/svc/s3-policy.yml"],
    "appconfig-ipfilter": ["addons/svc/appconfig-ipfilter.yml"],
    "subscription-filter": ["addons/svc/subscription-filter.yml"],
    "prometheus-policy": ["addons/svc/prometheus-policy.yml"],
}
|
|
17
|
+
|
|
8
18
|
|
|
9
19
|
def camel_case(s):
|
|
10
20
|
s = re.sub(r"(_|-)+", " ", s).title().replace(" ", "")
|