dbt-platform-helper 12.4.1-py3-none-any.whl → 12.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbt-platform-helper may be problematic.

Files changed (28)
  1. dbt_platform_helper/COMMANDS.md +0 -3
  2. dbt_platform_helper/commands/config.py +2 -2
  3. dbt_platform_helper/commands/copilot.py +47 -28
  4. dbt_platform_helper/commands/environment.py +16 -178
  5. dbt_platform_helper/commands/pipeline.py +4 -5
  6. dbt_platform_helper/constants.py +9 -0
  7. dbt_platform_helper/domain/config_validator.py +242 -0
  8. dbt_platform_helper/domain/copilot_environment.py +204 -0
  9. dbt_platform_helper/domain/database_copy.py +7 -5
  10. dbt_platform_helper/domain/terraform_environment.py +53 -0
  11. dbt_platform_helper/jinja2_tags.py +1 -1
  12. dbt_platform_helper/providers/cache.py +15 -21
  13. dbt_platform_helper/providers/cloudformation.py +0 -1
  14. dbt_platform_helper/providers/config.py +90 -0
  15. dbt_platform_helper/providers/opensearch.py +36 -0
  16. dbt_platform_helper/providers/platform_config_schema.py +589 -527
  17. dbt_platform_helper/providers/redis.py +34 -0
  18. dbt_platform_helper/providers/yaml_file.py +83 -0
  19. dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
  20. dbt_platform_helper/utils/aws.py +1 -57
  21. dbt_platform_helper/utils/files.py +0 -36
  22. dbt_platform_helper/utils/template.py +10 -0
  23. dbt_platform_helper/utils/validation.py +5 -327
  24. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.0.dist-info}/METADATA +2 -2
  25. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.0.dist-info}/RECORD +28 -20
  26. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.0.dist-info}/WHEEL +1 -1
  27. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.0.dist-info}/LICENSE +0 -0
  28. {dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/providers/redis.py
@@ -0,0 +1,34 @@
+ from dbt_platform_helper.providers.cache import CacheProvider
+
+
+ class RedisProvider:
+     def __init__(self, elasticache_client):
+         self.elasticache_client = elasticache_client
+
+     def get_supported_redis_versions(self):
+
+         cache_provider = self.__get_cache_provider()
+
+         if cache_provider.cache_refresh_required("redis"):
+
+             supported_versions_response = self.elasticache_client.describe_cache_engine_versions(
+                 Engine="redis"
+             )
+
+             supported_versions = [
+                 version["EngineVersion"]
+                 for version in supported_versions_response["CacheEngineVersions"]
+             ]
+
+             cache_provider.update_cache("redis", supported_versions)
+
+             return supported_versions
+
+         else:
+             return cache_provider.read_supported_versions_from_cache("redis")
+
+     # TODO - cache provider instantiated here rather than via dependency injection since it will likely only be used in the get_supported_redis_versions method.
+     # If another method is added which needs a CacheProvider, it should be injected into the constructor instead.
+     @staticmethod
+     def __get_cache_provider():
+         return CacheProvider()
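A minimal usage sketch for the new provider (the session setup here is illustrative, not part of the diff):

    import boto3

    from dbt_platform_helper.providers.redis import RedisProvider

    # The caller owns the boto3 client; the provider decides internally whether
    # to call AWS or serve the version list from its local cache.
    session = boto3.Session()
    provider = RedisProvider(session.client("elasticache"))
    print(provider.get_supported_redis_versions())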
dbt_platform_helper/providers/yaml_file.py
@@ -0,0 +1,83 @@
+ from abc import ABC
+ from abc import abstractmethod
+ from pathlib import Path
+
+ import yaml
+ from yaml.parser import ParserError
+ from yamllint import linter
+ from yamllint.config import YamlLintConfig
+
+
+ class FileProviderException(Exception):
+     pass
+
+
+ class YamlFileProviderException(FileProviderException):
+     pass
+
+
+ class FileNotFoundException(YamlFileProviderException):
+     pass
+
+
+ class InvalidYamlException(YamlFileProviderException):
+     pass
+
+
+ class DuplicateKeysException(YamlFileProviderException):
+     pass
+
+
+ class FileProvider(ABC):
+     @abstractmethod
+     def load(path: str) -> dict:
+         raise NotImplementedError("Implement this in the subclass")
+
+
+ class YamlFileProvider(FileProvider):
+     def load(path: str) -> dict:
+         """
+         Raises:
+             FileNotFoundException: file is not there
+             InvalidYamlException: file contains invalid yaml
+             DuplicateKeysException: yaml contains duplicate keys
+         """
+         if not Path(path).exists():
+             # TODO this error message is domain specific and should not mention deployment directory project here
+             raise FileNotFoundException(
+                 f"`{path}` is missing. Please check it exists and you are in the root directory of your deployment project."
+             )
+         try:
+             yaml_content = yaml.safe_load(Path(path).read_text())
+         except ParserError:
+             raise InvalidYamlException(f"{path} is not valid YAML.")
+
+         if not yaml_content:
+             return {}
+
+         YamlFileProvider.lint_yaml_for_duplicate_keys(path)
+
+         return yaml_content
+
+     def write(path: str, contents: dict, comment: str = ""):
+         with open(path, "w") as file:
+             file.write(comment)
+             yaml.dump(contents, file)
+
+     @staticmethod
+     def lint_yaml_for_duplicate_keys(path):
+         duplicate_keys = []
+         with open(path, "r") as yaml_file:
+             file_contents = yaml_file.read()
+             results = linter.run(
+                 file_contents, YamlLintConfig(yaml.dump({"rules": {"key-duplicates": "enable"}}))
+             )
+             duplicate_keys = [
+                 "\t"
+                 + f"Line {result.line}: {result.message}".replace(
+                     " in mapping (key-duplicates)", ""
+                 )
+                 for result in results
+             ]
+         if duplicate_keys:
+             raise DuplicateKeysException(",".join(duplicate_keys))
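A short sketch of the load path and its typed exceptions (the file name is illustrative). Note that `load` is defined without `self`, so it is called on the class:

    from dbt_platform_helper.providers.yaml_file import (
        DuplicateKeysException,
        FileNotFoundException,
        InvalidYamlException,
        YamlFileProvider,
    )

    try:
        # Returns {} for an empty file; otherwise the parsed YAML as a dict.
        config = YamlFileProvider.load("platform-config.yml")
    except FileNotFoundException as e:
        print(e)
    except InvalidYamlException as e:
        print(f"Parse error: {e}")
    except DuplicateKeysException as e:
        print(f"Duplicate keys: {e}")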
dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml
@@ -0,0 +1,67 @@
+ # {% extra_header %}
+ # {% version_info %}
+
+ Metadata:
+   cfn-lint:
+     config:
+       ignore_checks:
+         - W2001 # Parameter not used
+
+ Parameters:
+   # Copilot required Parameters...
+   App:
+     Type: String
+     Description: Your application's name.
+   Env:
+     Type: String
+     Description: The environment name your service, job, or workflow is being deployed to.
+   Name:
+     Type: String
+     Description: The name of the service, job, or workflow being deployed.
+
+ Resources: {% for resource in resources %}
+   {{ resource.app_prefix }}XEnvAccessPolicy:
+     Metadata:
+       'aws:copilot:description': 'An IAM ManagedPolicy for your service to access the bucket'
+     Type: AWS::IAM::ManagedPolicy
+     Properties:
+       Description: Grants Read access to the S3 bucket.
+       PolicyDocument:
+         Version: 2012-10-17
+         Statement:
+           - Sid: 'KMSDecryptAndGenerate'
+             Effect: Allow
+             Action:
+               - kms:Decrypt
+               - kms:GenerateDataKey
+             Resource: 'arn:aws:kms:eu-west-2:{{ resource.bucket_account }}:key/*'
+             Condition:
+               StringEquals:
+                 aws:PrincipalTag/copilot-environment:
+                   - "{{ resource.access_env }}"
+           - Sid: 'S3ObjectActions'
+             Effect: Allow
+             Action:
+               {% if resource.read %}- s3:Get*{% endif %}
+               {% if resource.write %}- s3:Put*{% endif %}
+             Resource: 'arn:aws:s3:::{{ resource.bucket_name }}/*'
+             Condition:
+               StringEquals:
+                 aws:PrincipalTag/copilot-environment:
+                   - "{{ resource.access_env }}"
+           - Sid: 'S3ListAction'
+             Effect: Allow
+             Action:
+               - s3:ListBucket
+             Resource: 'arn:aws:s3:::{{ resource.bucket_name }}'
+             Condition:
+               StringEquals:
+                 aws:PrincipalTag/copilot-environment:
+                   - "{{ resource.access_env }}"
+   {% endfor %}
+ Outputs:{% for resource in resources %}
+   {{ resource.app_prefix }}XEnvAccessPolicy:
+     Description: "The IAM::ManagedPolicy to attach to the task role"
+     Value:
+       Ref: {{ resource.app_prefix }}XEnvAccessPolicy
+   {% endfor %}
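The template is plain Jinja2 over a `resources` list plus the package's custom header tags. A hedged rendering sketch, assuming the custom tags register as ordinary Jinja2 extensions (the package's own template loader may wire this up differently, and the context values are made up):

    import jinja2

    from dbt_platform_helper.jinja2_tags import ExtraHeaderTag, VersionTag

    # The {% extra_header %} and {% version_info %} tags are custom extensions,
    # so they must be registered on the environment before parsing.
    env = jinja2.Environment(
        loader=jinja2.PackageLoader("dbt_platform_helper", "templates"),
        extensions=[ExtraHeaderTag, VersionTag],
    )
    template = env.get_template("addons/svc/s3-cross-account-policy.yml")
    rendered = template.render(
        resources=[
            {
                "app_prefix": "DemoApp",           # prefixes the ManagedPolicy resource name
                "bucket_account": "123456789012",  # account that owns the KMS key
                "bucket_name": "demo-bucket",
                "access_env": "dev",               # environment tag granted access
                "read": True,                      # emits s3:Get*
                "write": False,                    # would emit s3:Put*
            }
        ]
    )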
dbt_platform_helper/utils/aws.py
@@ -13,12 +13,12 @@ import click
  import yaml
  from boto3 import Session

+ from dbt_platform_helper.constants import REFRESH_TOKEN_MESSAGE
  from dbt_platform_helper.platform_exception import PlatformException
  from dbt_platform_helper.providers.aws import AWSException
  from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException
  from dbt_platform_helper.providers.aws import ImageNotFoundException
  from dbt_platform_helper.providers.aws import LogGroupNotFoundException
- from dbt_platform_helper.providers.cache import CacheProvider
  from dbt_platform_helper.providers.validation import ValidationException

  SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
@@ -27,9 +27,6 @@ AWS_SESSION_CACHE = {}


  def get_aws_session_or_abort(aws_profile: str = None) -> boto3.session.Session:
-     REFRESH_TOKEN_MESSAGE = (
-         "To refresh this SSO session run `aws sso login` with the corresponding profile"
-     )
      aws_profile = aws_profile or os.getenv("AWS_PROFILE")
      if aws_profile in AWS_SESSION_CACHE:
          return AWS_SESSION_CACHE[aws_profile]
@@ -358,59 +355,6 @@ def get_postgres_connection_data_updated_with_master_secret(session, parameter_n
      return parameter_data


- def get_supported_redis_versions():
-
-     cache_provider = CacheProvider()
-
-     if cache_provider.cache_refresh_required("redis"):
-
-         session = get_aws_session_or_abort()
-         elasticache_client = session.client("elasticache")
-
-         supported_versions_response = elasticache_client.describe_cache_engine_versions(
-             Engine="redis"
-         )
-
-         supported_versions = [
-             version["EngineVersion"]
-             for version in supported_versions_response["CacheEngineVersions"]
-         ]
-
-         cache_provider.update_cache("redis", supported_versions)
-
-         return supported_versions
-
-     else:
-         return cache_provider.read_supported_versions_from_cache("redis")
-
-
- def get_supported_opensearch_versions():
-
-     cache_provider = CacheProvider()
-
-     if cache_provider.cache_refresh_required("opensearch"):
-
-         session = get_aws_session_or_abort()
-         opensearch_client = session.client("opensearch")
-
-         response = opensearch_client.list_versions()
-         all_versions = response["Versions"]
-
-         opensearch_versions = [
-             version for version in all_versions if not version.startswith("Elasticsearch_")
-         ]
-         supported_versions = [
-             version.removeprefix("OpenSearch_") for version in opensearch_versions
-         ]
-
-         cache_provider.update_cache("opensearch", supported_versions)
-
-         return supported_versions
-
-     else:
-         return cache_provider.read_supported_versions_from_cache("opensearch")
-
-
  def get_connection_string(
      session: Session,
      app: str,
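Callers that previously reached for `utils.aws.get_supported_redis_versions()` now build the client themselves and inject it into the new provider; a rough equivalence sketch (the OpenSearch analogue presumably lives in the new providers/opensearch.py, whose contents are not shown in this diff):

    from dbt_platform_helper.providers.redis import RedisProvider
    from dbt_platform_helper.utils.aws import get_aws_session_or_abort

    # Old: versions = get_supported_redis_versions()
    session = get_aws_session_or_abort()
    versions = RedisProvider(session.client("elasticache")).get_supported_redis_versions()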
dbt_platform_helper/utils/files.py
@@ -1,4 +1,3 @@
- from copy import deepcopy
  from os import makedirs
  from pathlib import Path

@@ -67,38 +66,3 @@ def generate_override_files_from_template(base_path, overrides_path, output_dir,

      generate_files_for_dir("*")
      generate_files_for_dir("bin/*")
-
-
- def apply_environment_defaults(config):
-     if "environments" not in config:
-         return config
-
-     enriched_config = deepcopy(config)
-
-     environments = enriched_config["environments"]
-     env_defaults = environments.get("*", {})
-     without_defaults_entry = {
-         name: data if data else {} for name, data in environments.items() if name != "*"
-     }
-
-     default_versions = config.get("default_versions", {})
-
-     def combine_env_data(data):
-         return {
-             **env_defaults,
-             **data,
-             "versions": {
-                 **default_versions,
-                 **env_defaults.get("versions", {}),
-                 **data.get("versions", {}),
-             },
-         }
-
-     defaulted_envs = {
-         env_name: combine_env_data(env_data)
-         for env_name, env_data in without_defaults_entry.items()
-     }
-
-     enriched_config["environments"] = defaulted_envs
-
-     return enriched_config
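For reference, the removed helper merged the `"*"` defaults and top-level `default_versions` into every named environment (presumably relocated rather than dropped, given the new providers/config.py in this release). A worked example of the behaviour the removed code implemented (field names are illustrative):

    config = {
        "default_versions": {"platform-helper": "12.5.0"},
        "environments": {
            "*": {"requires_approval": False, "versions": {"terraform-platform-modules": "5"}},
            "dev": None,
            "prod": {"requires_approval": True},
        },
    }
    # apply_environment_defaults(config)["environments"] evaluated to:
    # {
    #     "dev": {"requires_approval": False,
    #             "versions": {"platform-helper": "12.5.0", "terraform-platform-modules": "5"}},
    #     "prod": {"requires_approval": True,
    #              "versions": {"platform-helper": "12.5.0", "terraform-platform-modules": "5"}},
    # }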
dbt_platform_helper/utils/template.py
@@ -5,6 +5,16 @@ import jinja2
  from dbt_platform_helper.jinja2_tags import ExtraHeaderTag
  from dbt_platform_helper.jinja2_tags import VersionTag

+ S3_CROSS_ACCOUNT_POLICY = "addons/svc/s3-cross-account-policy.yml"
+
+ ADDON_TEMPLATE_MAP = {
+     "s3": ["addons/svc/s3-policy.yml"],
+     "s3-policy": ["addons/svc/s3-policy.yml"],
+     "appconfig-ipfilter": ["addons/svc/appconfig-ipfilter.yml"],
+     "subscription-filter": ["addons/svc/subscription-filter.yml"],
+     "prometheus-policy": ["addons/svc/prometheus-policy.yml"],
+ }
+

  def camel_case(s):
      s = re.sub(r"(_|-)+", " ", s).title().replace(" ", "")
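A small sketch of how the new map might be consumed (the helper function here is hypothetical; the real generation loop lives in commands/copilot.py, whose changes are not shown in this hunk):

    from dbt_platform_helper.utils.template import ADDON_TEMPLATE_MAP, S3_CROSS_ACCOUNT_POLICY

    def templates_for_addon(addon_type: str) -> list:
        # Unknown addon types simply yield no service-level templates.
        return ADDON_TEMPLATE_MAP.get(addon_type, [])

    assert templates_for_addon("s3") == ["addons/svc/s3-policy.yml"]
    assert S3_CROSS_ACCOUNT_POLICY == "addons/svc/s3-cross-account-policy.yml"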
dbt_platform_helper/utils/validation.py
@@ -1,24 +1,7 @@
- import os
- import re
- from pathlib import Path
-
- import click
- import yaml
  from schema import SchemaError
- from yaml.parser import ParserError
- from yamllint import config
- from yamllint import linter

- from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
- from dbt_platform_helper.constants import ENVIRONMENTS_KEY
- from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
- from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
- from dbt_platform_helper.providers.platform_config_schema import EXTENSION_SCHEMAS
- from dbt_platform_helper.providers.platform_config_schema import PLATFORM_CONFIG_SCHEMA
- from dbt_platform_helper.utils.aws import get_supported_opensearch_versions
- from dbt_platform_helper.utils.aws import get_supported_redis_versions
- from dbt_platform_helper.utils.files import apply_environment_defaults
- from dbt_platform_helper.utils.messages import abort_with_error
+ from dbt_platform_helper.domain.config_validator import ConfigValidator
+ from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema


  def validate_addons(addons: dict):
@@ -33,7 +16,7 @@ def validate_addons(addons: dict):
              if not addon_type:
                  errors[addon_name] = f"Missing addon type in addon '{addon_name}'"
                  continue
-             schema = EXTENSION_SCHEMAS.get(addon_type, None)
+             schema = PlatformConfigSchema.extension_schemas().get(addon_type, None)
              if not schema:
                  errors[addon_name] = (
                      f"Unsupported addon type '{addon_type}' in addon '{addon_name}'"
@@ -43,312 +26,7 @@ def validate_addons(addons: dict):
          except SchemaError as ex:
              errors[addon_name] = f"Error in {addon_name}: {ex.code}"

-     _validate_extension_supported_versions(
-         config={"extensions": addons},
-         extension_type="redis",
-         version_key="engine",
-         get_supported_versions=get_supported_redis_versions,
-     )
-     _validate_extension_supported_versions(
-         config={"extensions": addons},
-         extension_type="opensearch",
-         version_key="engine",
-         get_supported_versions=get_supported_opensearch_versions,
-     )
+     ConfigValidator().validate_supported_redis_versions({"extensions": addons})
+     ConfigValidator().validate_supported_opensearch_versions({"extensions": addons})

      return errors
-
-
- def float_between_with_halfstep(lower, upper):
-     def is_between(value):
-         is_number = isinstance(value, int) or isinstance(value, float)
-         is_half_step = re.match(r"^\d+(\.[05])?$", str(value))
-
-         if is_number and is_half_step and lower <= value <= upper:
-             return True
-         raise SchemaError(f"should be a number between {lower} and {upper} in increments of 0.5")
-
-     return is_between
-
-
- def validate_platform_config(config):
-     PLATFORM_CONFIG_SCHEMA.validate(config)
-     enriched_config = apply_environment_defaults(config)
-     _validate_environment_pipelines(enriched_config)
-     _validate_environment_pipelines_triggers(enriched_config)
-     _validate_codebase_pipelines(enriched_config)
-     validate_database_copy_section(enriched_config)
-
-     _validate_extension_supported_versions(
-         config=config,
-         extension_type="redis",
-         version_key="engine",
-         get_supported_versions=get_supported_redis_versions,
-     )
-     _validate_extension_supported_versions(
-         config=config,
-         extension_type="opensearch",
-         version_key="engine",
-         get_supported_versions=get_supported_opensearch_versions,
-     )
-
-
- def _validate_extension_supported_versions(
-     config, extension_type, version_key, get_supported_versions
- ):
-     extensions = config.get("extensions", {})
-     if not extensions:
-         return
-
-     extensions_for_type = [
-         extension
-         for extension in config.get("extensions", {}).values()
-         if extension.get("type") == extension_type
-     ]
-
-     supported_extension_versions = get_supported_versions()
-     extensions_with_invalid_version = []
-
-     for extension in extensions_for_type:
-
-         environments = extension.get("environments", {})
-
-         if not isinstance(environments, dict):
-             click.secho(
-                 f"Error: {extension_type} extension definition is invalid type, expected dictionary",
-                 fg="red",
-             )
-             continue
-         for environment, env_config in environments.items():
-
-             # An extension version doesn't need to be specified for all environments, provided one is specified under "*".
-             # So check if the version is set before checking if it's supported
-             extension_version = env_config.get(version_key)
-             if extension_version and extension_version not in supported_extension_versions:
-                 extensions_with_invalid_version.append(
-                     {"environment": environment, "version": extension_version}
-                 )
-
-     for version_failure in extensions_with_invalid_version:
-         click.secho(
-             f"{extension_type} version for environment {version_failure['environment']} is not in the list of supported {extension_type} versions: {supported_extension_versions}. Provided Version: {version_failure['version']}",
-             fg="red",
-         )
-
-
- def validate_database_copy_section(config):
-     extensions = config.get("extensions", {})
-     if not extensions:
-         return
-
-     postgres_extensions = {
-         key: ext for key, ext in extensions.items() if ext.get("type", None) == "postgres"
-     }
-
-     if not postgres_extensions:
-         return
-
-     errors = []
-
-     for extension_name, extension in postgres_extensions.items():
-         database_copy_sections = extension.get("database_copy", [])
-
-         if not database_copy_sections:
-             return
-
-         all_environments = [env for env in config.get("environments", {}).keys() if not env == "*"]
-         all_envs_string = ", ".join(all_environments)
-
-         for section in database_copy_sections:
-             from_env = section["from"]
-             to_env = section["to"]
-
-             from_account = _get_env_deploy_account_info(config, from_env, "id")
-             to_account = _get_env_deploy_account_info(config, to_env, "id")
-
-             if from_env == to_env:
-                 errors.append(
-                     f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."
-                 )
-
-             if "prod" in to_env:
-                 errors.append(
-                     f"Copying to a prod environment is not supported: database_copy 'to' cannot be '{to_env}' in extension '{extension_name}'."
-                 )
-
-             if from_env not in all_environments:
-                 errors.append(
-                     f"database_copy 'from' parameter must be a valid environment ({all_envs_string}) but was '{from_env}' in extension '{extension_name}'."
-                 )
-
-             if to_env not in all_environments:
-                 errors.append(
-                     f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
-                 )
-
-             if from_account != to_account:
-                 if "from_account" not in section:
-                     errors.append(
-                         f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'from_account' parameter must be present."
-                     )
-                 elif section["from_account"] != from_account:
-                     errors.append(
-                         f"Incorrect value for 'from_account' for environment '{from_env}'"
-                     )
-
-                 if "to_account" not in section:
-                     errors.append(
-                         f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'to_account' parameter must be present."
-                     )
-                 elif section["to_account"] != to_account:
-                     errors.append(f"Incorrect value for 'to_account' for environment '{to_env}'")
-
-     if errors:
-         abort_with_error("\n".join(errors))
-
-
- def _get_env_deploy_account_info(config, env, key):
-     return (
-         config.get("environments", {}).get(env, {}).get("accounts", {}).get("deploy", {}).get(key)
-     )
-
-
- def _validate_environment_pipelines(config):
-     bad_pipelines = {}
-     for pipeline_name, pipeline in config.get("environment_pipelines", {}).items():
-         bad_envs = []
-         pipeline_account = pipeline.get("account", None)
-         if pipeline_account:
-             for env in pipeline.get("environments", {}).keys():
-                 env_account = _get_env_deploy_account_info(config, env, "name")
-                 if not env_account == pipeline_account:
-                     bad_envs.append(env)
-         if bad_envs:
-             bad_pipelines[pipeline_name] = {"account": pipeline_account, "bad_envs": bad_envs}
-     if bad_pipelines:
-         message = "The following pipelines are misconfigured:"
-         for pipeline, detail in bad_pipelines.items():
-             envs = detail["bad_envs"]
-             acc = detail["account"]
-             message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n"
-         abort_with_error(message)
-
-
- def _validate_codebase_pipelines(config):
-     if CODEBASE_PIPELINES_KEY in config:
-         for codebase in config[CODEBASE_PIPELINES_KEY]:
-             codebase_environments = []
-
-             for pipeline in codebase["pipelines"]:
-                 codebase_environments += [e["name"] for e in pipeline[ENVIRONMENTS_KEY]]
-
-             unique_codebase_environments = sorted(list(set(codebase_environments)))
-
-             if sorted(codebase_environments) != sorted(unique_codebase_environments):
-                 abort_with_error(
-                     f"The {PLATFORM_CONFIG_FILE} file is invalid, each environment can only be "
-                     "listed in a single pipeline per codebase"
-                 )
-
-
- def _validate_environment_pipelines_triggers(config):
-     errors = []
-     pipelines_with_triggers = {
-         pipeline_name: pipeline
-         for pipeline_name, pipeline in config.get("environment_pipelines", {}).items()
-         if "pipeline_to_trigger" in pipeline
-     }
-
-     for pipeline_name, pipeline in pipelines_with_triggers.items():
-         pipeline_to_trigger = pipeline["pipeline_to_trigger"]
-         if pipeline_to_trigger not in config.get("environment_pipelines", {}):
-             message = f" '{pipeline_name}' - '{pipeline_to_trigger}' is not a valid target pipeline to trigger"
-
-             errors.append(message)
-             continue
-
-         if pipeline_to_trigger == pipeline_name:
-             message = f" '{pipeline_name}' - pipelines cannot trigger themselves"
-             errors.append(message)
-
-     if errors:
-         error_message = "The following pipelines are misconfigured: \n"
-         abort_with_error(error_message + "\n ".join(errors))
-
-
- def lint_yaml_for_duplicate_keys(file_path):
-     lint_yaml_config = """
- rules:
-   key-duplicates: enable
- """
-     yaml_config = config.YamlLintConfig(lint_yaml_config)
-
-     with open(file_path, "r") as yaml_file:
-         file_contents = yaml_file.read()
-         results = linter.run(file_contents, yaml_config)
-
-     parsed_results = [
-         "\t" + f"Line {result.line}: {result.message}".replace(" in mapping (key-duplicates)", "")
-         for result in results
-     ]
-
-     return parsed_results
-
-
- def load_and_validate_platform_config(path=PLATFORM_CONFIG_FILE, disable_file_check=False):
-     if not disable_file_check:
-         config_file_check(path)
-     try:
-         conf = yaml.safe_load(Path(path).read_text())
-         duplicate_keys = lint_yaml_for_duplicate_keys(path)
-         if duplicate_keys:
-             abort_with_error(
-                 "Duplicate keys found in platform-config:"
-                 + os.linesep
-                 + os.linesep.join(duplicate_keys)
-             )
-         validate_platform_config(conf)
-         return conf
-     except ParserError:
-         abort_with_error(f"{PLATFORM_CONFIG_FILE} is not valid YAML")
-     except SchemaError as e:
-         abort_with_error(f"Schema error in {PLATFORM_CONFIG_FILE}. {e}")
-
-
- def config_file_check(path=PLATFORM_CONFIG_FILE):
-     platform_config_exists = Path(path).exists()
-     errors = []
-     warnings = []
-
-     messages = {
-         "storage.yml": {"instruction": " under the key 'extensions'", "type": errors},
-         "extensions.yml": {"instruction": " under the key 'extensions'", "type": errors},
-         "pipelines.yml": {
-             "instruction": ", change the key 'codebases' to 'codebase_pipelines'",
-             "type": errors,
-         },
-         PLATFORM_HELPER_VERSION_FILE: {
-             "instruction": ", under the key `default_versions: platform-helper:`",
-             "type": warnings,
-         },
-     }
-
-     for file in messages.keys():
-         if Path(file).exists():
-             message = (
-                 f"`{file}` is no longer supported. Please move its contents into the "
-                 f"`{PLATFORM_CONFIG_FILE}` file{messages[file]['instruction']} and delete `{file}`."
-             )
-             messages[file]["type"].append(message)
-
-     if not errors and not warnings and not platform_config_exists:
-         errors.append(
-             f"`{PLATFORM_CONFIG_FILE}` is missing. "
-             "Please check it exists and you are in the root directory of your deployment project."
-         )
-
-     if warnings:
-         click.secho("\n".join(warnings), bg="yellow", fg="black")
-     if errors:
-         click.secho("\n".join(errors), bg="red", fg="white")
-         exit(1)
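The bulk of the removed logic appears to move into the new domain/config_validator.py (+242) and the reworked providers/platform_config_schema.py; only the two ConfigValidator calls shown above remain in validate_addons. A minimal sketch of the new call shape (the addons dict is illustrative):

    from dbt_platform_helper.domain.config_validator import ConfigValidator

    addons = {"my-redis": {"type": "redis", "environments": {"*": {"engine": "7.1"}}}}
    # Each call fetches (or reads from cache) the supported engine versions and
    # reports any environment pinned to an unsupported one.
    ConfigValidator().validate_supported_redis_versions({"extensions": addons})
    ConfigValidator().validate_supported_opensearch_versions({"extensions": addons})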
{dbt_platform_helper-12.4.1.dist-info → dbt_platform_helper-12.5.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.3
  Name: dbt-platform-helper
- Version: 12.4.1
+ Version: 12.5.0
  Summary: Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot.
  License: MIT
  Author: Department for Business and Trade Platform Team