dbt-platform-helper 12.4.0-py3-none-any.whl → 12.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. dbt_platform_helper/COMMANDS.md +0 -3
  2. dbt_platform_helper/commands/config.py +2 -2
  3. dbt_platform_helper/commands/copilot.py +47 -28
  4. dbt_platform_helper/commands/environment.py +16 -178
  5. dbt_platform_helper/commands/pipeline.py +5 -34
  6. dbt_platform_helper/constants.py +12 -1
  7. dbt_platform_helper/domain/config_validator.py +242 -0
  8. dbt_platform_helper/domain/copilot_environment.py +204 -0
  9. dbt_platform_helper/domain/database_copy.py +7 -5
  10. dbt_platform_helper/domain/maintenance_page.py +1 -1
  11. dbt_platform_helper/domain/terraform_environment.py +53 -0
  12. dbt_platform_helper/jinja2_tags.py +1 -1
  13. dbt_platform_helper/providers/cache.py +77 -0
  14. dbt_platform_helper/providers/cloudformation.py +0 -1
  15. dbt_platform_helper/providers/config.py +90 -0
  16. dbt_platform_helper/providers/opensearch.py +36 -0
  17. dbt_platform_helper/providers/platform_config_schema.py +667 -0
  18. dbt_platform_helper/providers/redis.py +34 -0
  19. dbt_platform_helper/providers/yaml_file.py +83 -0
  20. dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
  21. dbt_platform_helper/utils/aws.py +1 -59
  22. dbt_platform_helper/utils/files.py +0 -106
  23. dbt_platform_helper/utils/template.py +10 -0
  24. dbt_platform_helper/utils/validation.py +5 -889
  25. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/METADATA +2 -2
  26. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/RECORD +29 -22
  27. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/WHEEL +1 -1
  28. dbt_platform_helper/templates/pipelines/environments/buildspec.yml +0 -80
  29. dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -48
  30. dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +0 -21
  31. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/LICENSE +0 -0
  32. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/utils/validation.py
@@ -1,81 +1,7 @@
-import ipaddress
-import os
-import re
-from pathlib import Path
-
-import click
-import yaml
-from schema import Optional
-from schema import Or
-from schema import Regex
-from schema import Schema
 from schema import SchemaError
-from yaml.parser import ParserError
-from yamllint import config
-from yamllint import linter
-
-from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
-from dbt_platform_helper.constants import ENVIRONMENTS_KEY
-from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
-from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
-from dbt_platform_helper.utils.aws import get_supported_opensearch_versions
-from dbt_platform_helper.utils.aws import get_supported_redis_versions
-from dbt_platform_helper.utils.files import apply_environment_defaults
-from dbt_platform_helper.utils.messages import abort_with_error
-
-
-def validate_string(regex_pattern: str):
-    def validator(string):
-        if not re.match(regex_pattern, string):
-            raise SchemaError(
-                f"String '{string}' does not match the required pattern '{regex_pattern}'. For more details on valid string patterns see: https://aws.github.io/copilot-cli/docs/manifest/lb-web-service/"
-            )
-        return string
-
-    return validator
-
-
-S3_BUCKET_NAME_ERROR_TEMPLATE = "Bucket name '{}' is invalid:\n{}"
-AVAILABILITY_UNCERTAIN_TEMPLATE = (
-    "Warning: Could not determine the availability of bucket name '{}'."
-)
-BUCKET_NAME_IN_USE_TEMPLATE = "Warning: Bucket name '{}' is already in use. Check your AWS accounts to see if this is a problem."
-
-
-def validate_s3_bucket_name(name: str):
-    errors = []
-    if not (2 < len(name) < 64):
-        errors.append("Length must be between 3 and 63 characters inclusive.")
 
-    if not re.match(r"^[a-z0-9].*[a-z0-9]$", name):
-        errors.append("Names must start and end with 0-9 or a-z.")
-
-    if not re.match(r"^[a-z0-9.-]*$", name):
-        errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.")
-
-    if ".." in name:
-        errors.append("Names cannot contain two adjacent periods.")
-
-    try:
-        ipaddress.ip_address(name)
-        errors.append("Names cannot be IP addresses.")
-    except ValueError:
-        pass
-
-    for prefix in ("xn--", "sthree-"):
-        if name.startswith(prefix):
-            errors.append(f"Names cannot be prefixed '{prefix}'.")
-
-    for suffix in ("-s3alias", "--ol-s3"):
-        if name.endswith(suffix):
-            errors.append(f"Names cannot be suffixed '{suffix}'.")
-
-    if errors:
-        raise SchemaError(
-            S3_BUCKET_NAME_ERROR_TEMPLATE.format(name, "\n".join(f" {e}" for e in errors))
-        )
-
-    return True
+from dbt_platform_helper.domain.config_validator import ConfigValidator
+from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema
 
 
 def validate_addons(addons: dict):
@@ -90,7 +16,7 @@ def validate_addons(addons: dict):
             if not addon_type:
                 errors[addon_name] = f"Missing addon type in addon '{addon_name}'"
                 continue
-            schema = SCHEMA_MAP.get(addon_type, None)
+            schema = PlatformConfigSchema.extension_schemas().get(addon_type, None)
             if not schema:
                 errors[addon_name] = (
                     f"Unsupported addon type '{addon_type}' in addon '{addon_name}'"
@@ -100,817 +26,7 @@ def validate_addons(addons: dict):
         except SchemaError as ex:
             errors[addon_name] = f"Error in {addon_name}: {ex.code}"
 
-    _validate_extension_supported_versions(
-        config={"extensions": addons},
-        extension_type="redis",
-        version_key="engine",
-        get_supported_versions=get_supported_redis_versions,
-    )
-    _validate_extension_supported_versions(
-        config={"extensions": addons},
-        extension_type="opensearch",
-        version_key="engine",
-        get_supported_versions=get_supported_opensearch_versions,
-    )
+    ConfigValidator().validate_supported_redis_versions({"extensions": addons})
+    ConfigValidator().validate_supported_opensearch_versions({"extensions": addons})
 
     return errors
-
-
-def int_between(lower, upper):
-    def is_between(value):
-        if isinstance(value, int) and lower <= value <= upper:
-            return True
-        raise SchemaError(f"should be an integer between {lower} and {upper}")
-
-    return is_between
-
-
-def float_between_with_halfstep(lower, upper):
-    def is_between(value):
-        is_number = isinstance(value, int) or isinstance(value, float)
-        is_half_step = re.match(r"^\d+(\.[05])?$", str(value))
-
-        if is_number and is_half_step and lower <= value <= upper:
-            return True
-        raise SchemaError(f"should be a number between {lower} and {upper} in increments of 0.5")
-
-    return is_between
-
-
-ENV_NAME = Regex(
-    r"^([a-z][a-zA-Z0-9]*|\*)$",
-    error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment",
-    # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason.
-)
-
-range_validator = validate_string(r"^\d+-\d+$")
-seconds_validator = validate_string(r"^\d+s$")
-branch_wildcard_validator = validate_string(r"^((?!\*).)*(\*)?$")
-
-NUMBER = Or(int, float)
-DELETION_POLICY = Or("Delete", "Retain")
-DB_DELETION_POLICY = Or("Delete", "Retain", "Snapshot")
-DELETION_PROTECTION = bool
-
-REDIS_PLANS = Or(
-    "micro",
-    "micro-ha",
-    "tiny",
-    "tiny-ha",
-    "small",
-    "small-ha",
-    "medium",
-    "medium-ha",
-    "large",
-    "large-ha",
-    "x-large",
-    "x-large-ha",
-)
-
-REDIS_ENGINE_VERSIONS = str
-
-REDIS_DEFINITION = {
-    "type": "redis",
-    Optional("environments"): {
-        ENV_NAME: {
-            Optional("plan"): REDIS_PLANS,
-            Optional("engine"): REDIS_ENGINE_VERSIONS,
-            Optional("replicas"): int_between(0, 5),
-            Optional("deletion_policy"): DELETION_POLICY,
-            Optional("apply_immediately"): bool,
-            Optional("automatic_failover_enabled"): bool,
-            Optional("instance"): str,
-            Optional("multi_az_enabled"): bool,
-        }
-    },
-}
-
-POSTGRES_PLANS = Or(
-    "tiny",
-    "small",
-    "small-ha",
-    "small-high-io",
-    "medium",
-    "medium-ha",
-    "medium-high-io",
-    "large",
-    "large-ha",
-    "large-high-io",
-    "x-large",
-    "x-large-ha",
-    "x-large-high-io",
-)
-POSTGRES_STORAGE_TYPES = Or("gp2", "gp3", "io1", "io2")
-
-RETENTION_POLICY = Or(
-    None,
-    {
-        "mode": Or("GOVERNANCE", "COMPLIANCE"),
-        Or("days", "years", only_one=True): int,
-    },
-)
-
-DATABASE_COPY = {
-    "from": ENV_NAME,
-    "to": ENV_NAME,
-    Optional("from_account"): str,
-    Optional("to_account"): str,
-    Optional("pipeline"): {Optional("schedule"): str},
-}
-
-POSTGRES_DEFINITION = {
-    "type": "postgres",
-    "version": NUMBER,
-    Optional("deletion_policy"): DB_DELETION_POLICY,
-    Optional("environments"): {
-        ENV_NAME: {
-            Optional("plan"): POSTGRES_PLANS,
-            Optional("volume_size"): int_between(20, 10000),
-            Optional("iops"): int_between(1000, 9950),
-            Optional("snapshot_id"): str,
-            Optional("deletion_policy"): DB_DELETION_POLICY,
-            Optional("deletion_protection"): DELETION_PROTECTION,
-            Optional("multi_az"): bool,
-            Optional("storage_type"): POSTGRES_STORAGE_TYPES,
-            Optional("backup_retention_days"): int_between(1, 35),
-        }
-    },
-    Optional("database_copy"): [DATABASE_COPY],
-    Optional("objects"): [
-        {
-            "key": str,
-            Optional("body"): str,
-        }
-    ],
-}
-
-LIFECYCLE_RULE = {
-    Optional("filter_prefix"): str,
-    "expiration_days": int,
-    "enabled": bool,
-}
-
-
-def kms_key_arn_regex(key):
-    return Regex(
-        r"^arn:aws:kms:.*:\d{12}:(key|alias).*",
-        error=f"{key} must contain a valid ARN for a KMS key",
-    )
-
-
-def s3_bucket_arn_regex(key):
-    return Regex(
-        r"^arn:aws:s3::.*",
-        error=f"{key} must contain a valid ARN for an S3 bucket",
-    )
-
-
-def iam_role_arn_regex(key):
-    return Regex(
-        r"^arn:aws:iam::\d{12}:role/.*",
-        error=f"{key} must contain a valid ARN for an IAM role",
-    )
-
-
-def dbt_email_address_regex(key):
-    return Regex(
-        r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$",
-        error=f"{key} must contain a valid DBT email address",
-    )
-
-
-EXTERNAL_ROLE_ACCESS = {
-    "role_arn": iam_role_arn_regex("role_arn"),
-    "read": bool,
-    "write": bool,
-    "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"),
-}
-
-CROSS_ENVIRONMENT_SERVICE_ACCESS = {
-    "application": str,
-    "environment": ENV_NAME,
-    "account": str,
-    "service": str,
-    "read": bool,
-    "write": bool,
-    "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"),
-}
-
-LOWER_ALPHANUMERIC = Regex(
-    r"^([a-z][a-zA-Z0-9_-]*|\*)$",
-    error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore",
-)
-
-DATA_IMPORT = {
-    Optional("source_kms_key_arn"): kms_key_arn_regex("source_kms_key_arn"),
-    "source_bucket_arn": s3_bucket_arn_regex("source_bucket_arn"),
-    "worker_role_arn": iam_role_arn_regex("worker_role_arn"),
-}
-
-DATA_MIGRATION = {
-    "import": DATA_IMPORT,
-}
-
-S3_BASE = {
-    Optional("readonly"): bool,
-    Optional("serve_static_content"): bool,
-    Optional("services"): Or("__all__", [str]),
-    Optional("environments"): {
-        ENV_NAME: {
-            "bucket_name": validate_s3_bucket_name,
-            Optional("deletion_policy"): DELETION_POLICY,
-            Optional("retention_policy"): RETENTION_POLICY,
-            Optional("versioning"): bool,
-            Optional("lifecycle_rules"): [LIFECYCLE_RULE],
-            Optional("data_migration"): DATA_MIGRATION,
-            Optional("external_role_access"): {LOWER_ALPHANUMERIC: EXTERNAL_ROLE_ACCESS},
-            Optional("cross_environment_service_access"): {
-                LOWER_ALPHANUMERIC: CROSS_ENVIRONMENT_SERVICE_ACCESS
-            },
-        },
-    },
-}
-
-S3_POLICY_DEFINITION = dict(S3_BASE)
-S3_POLICY_DEFINITION.update({"type": "s3-policy"})
-
-S3_DEFINITION = dict(S3_BASE)
-S3_DEFINITION.update(
-    {
-        "type": "s3",
-        Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}],
-    }
-)
-
-MONITORING_DEFINITION = {
-    "type": "monitoring",
-    Optional("environments"): {
-        ENV_NAME: {
-            Optional("enable_ops_center"): bool,
-        }
-    },
-}
-
-OPENSEARCH_PLANS = Or(
-    "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha"
-)
-OPENSEARCH_ENGINE_VERSIONS = str
-OPENSEARCH_MIN_VOLUME_SIZE = 10
-OPENSEARCH_MAX_VOLUME_SIZE = {
-    "tiny": 100,
-    "small": 200,
-    "small-ha": 200,
-    "medium": 512,
-    "medium-ha": 512,
-    "large": 1000,
-    "large-ha": 1000,
-    "x-large": 1500,
-    "x-large-ha": 1500,
-}
-
-OPENSEARCH_DEFINITION = {
-    "type": "opensearch",
-    Optional("environments"): {
-        ENV_NAME: {
-            Optional("engine"): OPENSEARCH_ENGINE_VERSIONS,
-            Optional("deletion_policy"): DELETION_POLICY,
-            Optional("plan"): OPENSEARCH_PLANS,
-            Optional("volume_size"): int,
-            Optional("ebs_throughput"): int,
-            Optional("ebs_volume_type"): str,
-            Optional("instance"): str,
-            Optional("instances"): int,
-            Optional("master"): bool,
-            Optional("es_app_log_retention_in_days"): int,
-            Optional("index_slow_log_retention_in_days"): int,
-            Optional("audit_log_retention_in_days"): int,
-            Optional("search_slow_log_retention_in_days"): int,
-            Optional("password_special_characters"): str,
-            Optional("urlencode_password"): bool,
-        }
-    },
-}
-
-CACHE_POLICY_DEFINITION = {
-    "min_ttl": int,
-    "max_ttl": int,
-    "default_ttl": int,
-    "cookies_config": Or("none", "whitelist", "allExcept", "all"),
-    "header": Or("none", "whitelist"),
-    "query_string_behavior": Or("none", "whitelist", "allExcept", "all"),
-    Optional("cookie_list"): list,
-    Optional("headers_list"): list,
-    Optional("cache_policy_query_strings"): list,
-}
-
-PATHS_DEFINITION = {
-    Optional("default"): {
-        "cache": str,
-        "request": str,
-    },
-    Optional("additional"): list[
-        {
-            "path": str,
-            "cache": str,
-            "request": str,
-        }
-    ],
-}
-
-ALB_DEFINITION = {
-    "type": "alb",
-    Optional("environments"): {
-        ENV_NAME: Or(
-            {
-                Optional("additional_address_list"): list,
-                Optional("allowed_methods"): list,
-                Optional("cached_methods"): list,
-                Optional("cdn_compress"): bool,
-                Optional("cdn_domains_list"): dict,
-                Optional("cdn_geo_locations"): list,
-                Optional("cdn_geo_restriction_type"): str,
-                Optional("cdn_logging_bucket"): str,
-                Optional("cdn_logging_bucket_prefix"): str,
-                Optional("cdn_timeout_seconds"): int,
-                Optional("default_waf"): str,
-                Optional("domain_prefix"): str,
-                Optional("enable_logging"): bool,
-                Optional("env_root"): str,
-                Optional("forwarded_values_forward"): str,
-                Optional("forwarded_values_headers"): list,
-                Optional("forwarded_values_query_string"): bool,
-                Optional("origin_protocol_policy"): str,
-                Optional("origin_ssl_protocols"): list,
-                Optional("slack_alert_channel_alb_secret_rotation"): str,
-                Optional("viewer_certificate_minimum_protocol_version"): str,
-                Optional("viewer_certificate_ssl_support_method"): str,
-                Optional("viewer_protocol_policy"): str,
-                Optional("cache_policy"): dict({str: CACHE_POLICY_DEFINITION}),
-                Optional("origin_request_policy"): dict({str: {}}),
-                Optional("paths"): dict({str: PATHS_DEFINITION}),
-            },
-            None,
-        )
-    },
-}
-
-PROMETHEUS_POLICY_DEFINITION = {
-    "type": "prometheus-policy",
-    Optional("services"): Or("__all__", [str]),
-    Optional("environments"): {
-        ENV_NAME: {
-            "role_arn": str,
-        }
-    },
-}
-
-_DEFAULT_VERSIONS_DEFINITION = {
-    Optional("terraform-platform-modules"): str,
-    Optional("platform-helper"): str,
-}
-_ENVIRONMENTS_VERSIONS_OVERRIDES = {
-    Optional("terraform-platform-modules"): str,
-}
-_PIPELINE_VERSIONS_OVERRIDES = {
-    Optional("platform-helper"): str,
-}
-
-_ENVIRONMENTS_PARAMS = {
-    Optional("accounts"): {
-        "deploy": {
-            "name": str,
-            "id": str,
-        },
-        "dns": {
-            "name": str,
-            "id": str,
-        },
-    },
-    Optional("requires_approval"): bool,
-    Optional("versions"): _ENVIRONMENTS_VERSIONS_OVERRIDES,
-    Optional("vpc"): str,
-}
-
-ENVIRONMENTS_DEFINITION = {str: Or(None, _ENVIRONMENTS_PARAMS)}
-
-CODEBASE_PIPELINES_DEFINITION = [
-    {
-        "name": str,
-        "repository": str,
-        Optional("additional_ecr_repository"): str,
-        Optional("deploy_repository_branch"): str,
-        "services": list[str],
-        "pipelines": [
-            Or(
-                {
-                    "name": str,
-                    "branch": branch_wildcard_validator,
-                    "environments": [
-                        {
-                            "name": str,
-                            Optional("requires_approval"): bool,
-                        }
-                    ],
-                },
-                {
-                    "name": str,
-                    "tag": bool,
-                    "environments": [
-                        {
-                            "name": str,
-                            Optional("requires_approval"): bool,
-                        }
-                    ],
-                },
-            ),
-        ],
-    },
-]
-
-ENVIRONMENT_PIPELINES_DEFINITION = {
-    str: {
-        Optional("account"): str,
-        Optional("branch", default="main"): str,
-        Optional("pipeline_to_trigger"): str,
-        Optional("versions"): _PIPELINE_VERSIONS_OVERRIDES,
-        "slack_channel": str,
-        "trigger_on_push": bool,
-        "environments": {str: Or(None, _ENVIRONMENTS_PARAMS)},
-    }
-}
-
-PLATFORM_CONFIG_SCHEMA = Schema(
-    {
-        # The following line is for the AWS Copilot version, will be removed under DBTP-1002
-        "application": str,
-        Optional("legacy_project", default=False): bool,
-        Optional("default_versions"): _DEFAULT_VERSIONS_DEFINITION,
-        Optional("accounts"): list[str],
-        Optional("environments"): ENVIRONMENTS_DEFINITION,
-        Optional("codebase_pipelines"): CODEBASE_PIPELINES_DEFINITION,
-        Optional("extensions"): {
-            str: Or(
-                REDIS_DEFINITION,
-                POSTGRES_DEFINITION,
-                S3_DEFINITION,
-                S3_POLICY_DEFINITION,
-                MONITORING_DEFINITION,
-                OPENSEARCH_DEFINITION,
-                ALB_DEFINITION,
-                PROMETHEUS_POLICY_DEFINITION,
-            )
-        },
-        Optional("environment_pipelines"): ENVIRONMENT_PIPELINES_DEFINITION,
-    }
-)
-
-
-def validate_platform_config(config):
-    PLATFORM_CONFIG_SCHEMA.validate(config)
-    enriched_config = apply_environment_defaults(config)
-    _validate_environment_pipelines(enriched_config)
-    _validate_environment_pipelines_triggers(enriched_config)
-    _validate_codebase_pipelines(enriched_config)
-    validate_database_copy_section(enriched_config)
-
-    _validate_extension_supported_versions(
-        config=config,
-        extension_type="redis",
-        version_key="engine",
-        get_supported_versions=get_supported_redis_versions,
-    )
-    _validate_extension_supported_versions(
-        config=config,
-        extension_type="opensearch",
-        version_key="engine",
-        get_supported_versions=get_supported_opensearch_versions,
-    )
-
-
-def _validate_extension_supported_versions(
-    config, extension_type, version_key, get_supported_versions
-):
-    extensions = config.get("extensions", {})
-    if not extensions:
-        return
-
-    extensions_for_type = [
-        extension
-        for extension in config.get("extensions", {}).values()
-        if extension.get("type") == extension_type
-    ]
-
-    supported_extension_versions = get_supported_versions()
-    extensions_with_invalid_version = []
-
-    for extension in extensions_for_type:
-
-        environments = extension.get("environments", {})
-
-        if not isinstance(environments, dict):
-            click.secho(
-                f"Error: {extension_type} extension definition is invalid type, expected dictionary",
-                fg="red",
-            )
-            continue
-        for environment, env_config in environments.items():
-
-            # An extension version doesn't need to be specified for all environments, provided one is specified under "*".
-            # So check if the version is set before checking if it's supported
-            extension_version = env_config.get(version_key)
-            if extension_version and extension_version not in supported_extension_versions:
-                extensions_with_invalid_version.append(
-                    {"environment": environment, "version": extension_version}
-                )
-
-    for version_failure in extensions_with_invalid_version:
-        click.secho(
-            f"{extension_type} version for environment {version_failure['environment']} is not in the list of supported {extension_type} versions: {supported_extension_versions}. Provided Version: {version_failure['version']}",
-            fg="red",
-        )
-
-
-def validate_database_copy_section(config):
-    extensions = config.get("extensions", {})
-    if not extensions:
-        return
-
-    postgres_extensions = {
-        key: ext for key, ext in extensions.items() if ext.get("type", None) == "postgres"
-    }
-
-    if not postgres_extensions:
-        return
-
-    errors = []
-
-    for extension_name, extension in postgres_extensions.items():
-        database_copy_sections = extension.get("database_copy", [])
-
-        if not database_copy_sections:
-            return
-
-        all_environments = [env for env in config.get("environments", {}).keys() if not env == "*"]
-        all_envs_string = ", ".join(all_environments)
-
-        for section in database_copy_sections:
-            from_env = section["from"]
-            to_env = section["to"]
-
-            from_account = _get_env_deploy_account_info(config, from_env, "id")
-            to_account = _get_env_deploy_account_info(config, to_env, "id")
-
-            if from_env == to_env:
-                errors.append(
-                    f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."
-                )
-
-            if "prod" in to_env:
-                errors.append(
-                    f"Copying to a prod environment is not supported: database_copy 'to' cannot be '{to_env}' in extension '{extension_name}'."
-                )
-
-            if from_env not in all_environments:
-                errors.append(
-                    f"database_copy 'from' parameter must be a valid environment ({all_envs_string}) but was '{from_env}' in extension '{extension_name}'."
-                )
-
-            if to_env not in all_environments:
-                errors.append(
-                    f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
-                )
-
-            if from_account != to_account:
-                if "from_account" not in section:
-                    errors.append(
-                        f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'from_account' parameter must be present."
-                    )
-                elif section["from_account"] != from_account:
-                    errors.append(
-                        f"Incorrect value for 'from_account' for environment '{from_env}'"
-                    )
-
-                if "to_account" not in section:
-                    errors.append(
-                        f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'to_account' parameter must be present."
-                    )
-                elif section["to_account"] != to_account:
-                    errors.append(f"Incorrect value for 'to_account' for environment '{to_env}'")
-
-    if errors:
-        abort_with_error("\n".join(errors))
-
-
-def _get_env_deploy_account_info(config, env, key):
-    return (
-        config.get("environments", {}).get(env, {}).get("accounts", {}).get("deploy", {}).get(key)
-    )
-
-
-def _validate_environment_pipelines(config):
-    bad_pipelines = {}
-    for pipeline_name, pipeline in config.get("environment_pipelines", {}).items():
-        bad_envs = []
-        pipeline_account = pipeline.get("account", None)
-        if pipeline_account:
-            for env in pipeline.get("environments", {}).keys():
-                env_account = _get_env_deploy_account_info(config, env, "name")
-                if not env_account == pipeline_account:
-                    bad_envs.append(env)
-        if bad_envs:
-            bad_pipelines[pipeline_name] = {"account": pipeline_account, "bad_envs": bad_envs}
-    if bad_pipelines:
-        message = "The following pipelines are misconfigured:"
-        for pipeline, detail in bad_pipelines.items():
-            envs = detail["bad_envs"]
-            acc = detail["account"]
-            message += f" '{pipeline}' - these environments are not in the '{acc}' account: {', '.join(envs)}\n"
-        abort_with_error(message)
-
-
-def _validate_codebase_pipelines(config):
-    if CODEBASE_PIPELINES_KEY in config:
-        for codebase in config[CODEBASE_PIPELINES_KEY]:
-            codebase_environments = []
-
-            for pipeline in codebase["pipelines"]:
-                codebase_environments += [e["name"] for e in pipeline[ENVIRONMENTS_KEY]]
-
-            unique_codebase_environments = sorted(list(set(codebase_environments)))
-
-            if sorted(codebase_environments) != sorted(unique_codebase_environments):
-                abort_with_error(
-                    f"The {PLATFORM_CONFIG_FILE} file is invalid, each environment can only be "
-                    "listed in a single pipeline per codebase"
-                )
-
-
-def _validate_environment_pipelines_triggers(config):
-    errors = []
-    pipelines_with_triggers = {
-        pipeline_name: pipeline
-        for pipeline_name, pipeline in config.get("environment_pipelines", {}).items()
-        if "pipeline_to_trigger" in pipeline
-    }
-
-    for pipeline_name, pipeline in pipelines_with_triggers.items():
-        pipeline_to_trigger = pipeline["pipeline_to_trigger"]
-        if pipeline_to_trigger not in config.get("environment_pipelines", {}):
-            message = f" '{pipeline_name}' - '{pipeline_to_trigger}' is not a valid target pipeline to trigger"
-
-            errors.append(message)
-            continue
-
-        if pipeline_to_trigger == pipeline_name:
-            message = f" '{pipeline_name}' - pipelines cannot trigger themselves"
-            errors.append(message)
-
-    if errors:
-        error_message = "The following pipelines are misconfigured: \n"
-        abort_with_error(error_message + "\n ".join(errors))
-
-
-def lint_yaml_for_duplicate_keys(file_path):
-    lint_yaml_config = """
-rules:
-  key-duplicates: enable
-"""
-    yaml_config = config.YamlLintConfig(lint_yaml_config)
-
-    with open(file_path, "r") as yaml_file:
-        file_contents = yaml_file.read()
-        results = linter.run(file_contents, yaml_config)
-
-    parsed_results = [
-        "\t" + f"Line {result.line}: {result.message}".replace(" in mapping (key-duplicates)", "")
-        for result in results
-    ]
-
-    return parsed_results
-
-
-def load_and_validate_platform_config(path=PLATFORM_CONFIG_FILE, disable_file_check=False):
-    if not disable_file_check:
-        config_file_check(path)
-    try:
-        conf = yaml.safe_load(Path(path).read_text())
-        duplicate_keys = lint_yaml_for_duplicate_keys(path)
-        if duplicate_keys:
-            abort_with_error(
-                "Duplicate keys found in platform-config:"
-                + os.linesep
-                + os.linesep.join(duplicate_keys)
-            )
-        validate_platform_config(conf)
-        return conf
-    except ParserError:
-        abort_with_error(f"{PLATFORM_CONFIG_FILE} is not valid YAML")
-    except SchemaError as e:
-        abort_with_error(f"Schema error in {PLATFORM_CONFIG_FILE}. {e}")
-
-
-def config_file_check(path=PLATFORM_CONFIG_FILE):
-    platform_config_exists = Path(path).exists()
-    errors = []
-    warnings = []
-
-    messages = {
-        "storage.yml": {"instruction": " under the key 'extensions'", "type": errors},
-        "extensions.yml": {"instruction": " under the key 'extensions'", "type": errors},
-        "pipelines.yml": {
-            "instruction": ", change the key 'codebases' to 'codebase_pipelines'",
-            "type": errors,
-        },
-        PLATFORM_HELPER_VERSION_FILE: {
-            "instruction": ", under the key `default_versions: platform-helper:`",
-            "type": warnings,
-        },
-    }
-
-    for file in messages.keys():
-        if Path(file).exists():
-            message = (
-                f"`{file}` is no longer supported. Please move its contents into the "
-                f"`{PLATFORM_CONFIG_FILE}` file{messages[file]['instruction']} and delete `{file}`."
-            )
-            messages[file]["type"].append(message)
-
-    if not errors and not warnings and not platform_config_exists:
-        errors.append(
-            f"`{PLATFORM_CONFIG_FILE}` is missing. "
-            "Please check it exists and you are in the root directory of your deployment project."
-        )
-
-    if warnings:
-        click.secho("\n".join(warnings), bg="yellow", fg="black")
-    if errors:
-        click.secho("\n".join(errors), bg="red", fg="white")
-        exit(1)
-
-
-S3_SCHEMA = Schema(S3_DEFINITION)
-S3_POLICY_SCHEMA = Schema(S3_POLICY_DEFINITION)
-POSTGRES_SCHEMA = Schema(POSTGRES_DEFINITION)
-REDIS_SCHEMA = Schema(REDIS_DEFINITION)
-
-
-class ConditionalSchema(Schema):
-    def validate(self, data, _is_conditional_schema=True):
-        data = super(ConditionalSchema, self).validate(data, _is_conditional_schema=False)
-        if _is_conditional_schema:
-            default_plan = None
-            default_volume_size = None
-
-            default_environment_config = data["environments"].get(
-                "*", data["environments"].get("default", None)
-            )
-            if default_environment_config:
-                default_plan = default_environment_config.get("plan", None)
-                default_volume_size = default_environment_config.get("volume_size", None)
-
-            for env in data["environments"]:
-                volume_size = data["environments"][env].get("volume_size", default_volume_size)
-                plan = data["environments"][env].get("plan", default_plan)
-
-                if volume_size:
-                    if not plan:
-                        raise SchemaError(f"Missing key: 'plan'")
-
-                    if volume_size < OPENSEARCH_MIN_VOLUME_SIZE:
-                        raise SchemaError(
-                            f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {OPENSEARCH_MIN_VOLUME_SIZE}"
-                        )
-
-                    for key in OPENSEARCH_MAX_VOLUME_SIZE:
-                        if plan == key and not volume_size <= OPENSEARCH_MAX_VOLUME_SIZE[key]:
-                            raise SchemaError(
-                                f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {OPENSEARCH_MIN_VOLUME_SIZE} and {OPENSEARCH_MAX_VOLUME_SIZE[key]} for plan {plan}"
-                            )
-
-        return data
-
-
-OPENSEARCH_SCHEMA = ConditionalSchema(OPENSEARCH_DEFINITION)
-MONITORING_SCHEMA = Schema(MONITORING_DEFINITION)
-ALB_SCHEMA = Schema(ALB_DEFINITION)
-PROMETHEUS_POLICY_SCHEMA = Schema(PROMETHEUS_POLICY_DEFINITION)
-
-
-def no_param_schema(schema_type):
-    return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])})
-
-
-SCHEMA_MAP = {
-    "s3": S3_SCHEMA,
-    "s3-policy": S3_POLICY_SCHEMA,
-    "postgres": POSTGRES_SCHEMA,
-    "redis": REDIS_SCHEMA,
-    "opensearch": OPENSEARCH_SCHEMA,
-    "monitoring": MONITORING_SCHEMA,
-    "appconfig-ipfilter": no_param_schema("appconfig-ipfilter"),
-    "subscription-filter": no_param_schema("subscription-filter"),
-    "vpc": no_param_schema("vpc"),
-    "xray": no_param_schema("xray"),
-    "alb": ALB_SCHEMA,
-    "prometheus-policy": PROMETHEUS_POLICY_SCHEMA,
-}
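
Taken together, the validation.py changes show the shape of the 12.5.0 refactor: the module-level SCHEMA_MAP and the _validate_extension_supported_versions helper are replaced by the new PlatformConfigSchema provider and ConfigValidator domain class. Below is a minimal sketch of the new call pattern, built only from the calls visible in this diff; the addons dictionary is an invented example, not real project config.

    from dbt_platform_helper.domain.config_validator import ConfigValidator
    from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema

    # Invented example addon; any extension type from the old SCHEMA_MAP would do.
    addons = {
        "my-cache": {
            "type": "redis",
            "environments": {"*": {"plan": "small", "engine": "7.1"}},
        }
    }

    for addon_name, addon in addons.items():
        # Schema lookup now goes through the provider class instead of SCHEMA_MAP.
        schema = PlatformConfigSchema.extension_schemas().get(addon["type"], None)
        if schema:
            schema.validate(addon)  # raises schema.SchemaError on invalid config

    # Engine version checks moved from _validate_extension_supported_versions
    # onto the ConfigValidator domain object.
    ConfigValidator().validate_supported_redis_versions({"extensions": addons})
    ConfigValidator().validate_supported_opensearch_versions({"extensions": addons})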