dbt-platform-helper 12.4.0-py3-none-any.whl → 12.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbt-platform-helper has been flagged as possibly problematic.

Files changed (32)
  1. dbt_platform_helper/COMMANDS.md +0 -3
  2. dbt_platform_helper/commands/config.py +2 -2
  3. dbt_platform_helper/commands/copilot.py +47 -28
  4. dbt_platform_helper/commands/environment.py +16 -178
  5. dbt_platform_helper/commands/pipeline.py +5 -34
  6. dbt_platform_helper/constants.py +12 -1
  7. dbt_platform_helper/domain/config_validator.py +242 -0
  8. dbt_platform_helper/domain/copilot_environment.py +204 -0
  9. dbt_platform_helper/domain/database_copy.py +7 -5
  10. dbt_platform_helper/domain/maintenance_page.py +1 -1
  11. dbt_platform_helper/domain/terraform_environment.py +53 -0
  12. dbt_platform_helper/jinja2_tags.py +1 -1
  13. dbt_platform_helper/providers/cache.py +77 -0
  14. dbt_platform_helper/providers/cloudformation.py +0 -1
  15. dbt_platform_helper/providers/config.py +90 -0
  16. dbt_platform_helper/providers/opensearch.py +36 -0
  17. dbt_platform_helper/providers/platform_config_schema.py +667 -0
  18. dbt_platform_helper/providers/redis.py +34 -0
  19. dbt_platform_helper/providers/yaml_file.py +83 -0
  20. dbt_platform_helper/templates/addons/svc/s3-cross-account-policy.yml +67 -0
  21. dbt_platform_helper/utils/aws.py +1 -59
  22. dbt_platform_helper/utils/files.py +0 -106
  23. dbt_platform_helper/utils/template.py +10 -0
  24. dbt_platform_helper/utils/validation.py +5 -889
  25. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/METADATA +2 -2
  26. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/RECORD +29 -22
  27. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/WHEEL +1 -1
  28. dbt_platform_helper/templates/pipelines/environments/buildspec.yml +0 -80
  29. dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -48
  30. dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +0 -21
  31. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/LICENSE +0 -0
  32. {dbt_platform_helper-12.4.0.dist-info → dbt_platform_helper-12.5.0.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/providers/platform_config_schema.py (new file)
@@ -0,0 +1,667 @@
+import ipaddress
+import re
+from typing import Callable
+
+from schema import Optional
+from schema import Or
+from schema import Regex
+from schema import Schema
+from schema import SchemaError
+
+
+class PlatformConfigSchema:
+    @staticmethod
+    def schema() -> Schema:
+        return Schema(
+            {
+                # The following line is for the AWS Copilot version; it will be removed under DBTP-1002
+                "application": str,
+                Optional("legacy_project", default=False): bool,
+                Optional("default_versions"): PlatformConfigSchema.__default_versions_schema(),
+                Optional("accounts"): list[str],
+                Optional("environments"): PlatformConfigSchema.__environments_schema(),
+                Optional("codebase_pipelines"): PlatformConfigSchema.__codebase_pipelines_schema(),
+                Optional(
+                    "environment_pipelines"
+                ): PlatformConfigSchema.__environment_pipelines_schema(),
+                Optional("extensions"): {
+                    str: Or(
+                        PlatformConfigSchema.__alb_schema(),
+                        PlatformConfigSchema.__monitoring_schema(),
+                        PlatformConfigSchema.__opensearch_schema(),
+                        PlatformConfigSchema.__postgres_schema(),
+                        PlatformConfigSchema.__prometheus_policy_schema(),
+                        PlatformConfigSchema.__redis_schema(),
+                        PlatformConfigSchema.__s3_bucket_schema(),
+                        PlatformConfigSchema.__s3_bucket_policy_schema(),
+                    )
+                },
+            }
+        )
+
+    @staticmethod
+    def extension_schemas() -> dict:
+        return {
+            "alb": Schema(PlatformConfigSchema.__alb_schema()),
+            "appconfig-ipfilter": PlatformConfigSchema.__no_configuration_required_schema(
+                "appconfig-ipfilter"
+            ),
+            "opensearch": ConditionalOpensSearchSchema(PlatformConfigSchema.__opensearch_schema()),
+            "postgres": Schema(PlatformConfigSchema.__postgres_schema()),
+            "prometheus-policy": Schema(PlatformConfigSchema.__prometheus_policy_schema()),
+            "redis": Schema(PlatformConfigSchema.__redis_schema()),
+            "s3": Schema(PlatformConfigSchema.__s3_bucket_schema()),
+            "s3-policy": Schema(PlatformConfigSchema.__s3_bucket_policy_schema()),
+            "subscription-filter": PlatformConfigSchema.__no_configuration_required_schema(
+                "subscription-filter"
+            ),
+            # Todo: The next three are no longer relevant. Remove them.
+            "monitoring": Schema(PlatformConfigSchema.__monitoring_schema()),
+            "vpc": PlatformConfigSchema.__no_configuration_required_schema("vpc"),
+            "xray": PlatformConfigSchema.__no_configuration_required_schema("xray"),
+        }
+
+    @staticmethod
+    def __alb_schema() -> dict:
+        _valid_alb_cache_policy = {
+            "min_ttl": int,
+            "max_ttl": int,
+            "default_ttl": int,
+            "cookies_config": Or("none", "whitelist", "allExcept", "all"),
+            "header": Or("none", "whitelist"),
+            "query_string_behavior": Or("none", "whitelist", "allExcept", "all"),
+            Optional("cookie_list"): list,
+            Optional("headers_list"): list,
+            Optional("cache_policy_query_strings"): list,
+        }
+
+        _valid_alb_paths_definition = {
+            Optional("default"): {
+                "cache": str,
+                "request": str,
+            },
+            Optional("additional"): list[
+                {
+                    "path": str,
+                    "cache": str,
+                    "request": str,
+                }
+            ],
+        }
+
+        return {
+            "type": "alb",
+            Optional("environments"): {
+                PlatformConfigSchema.__valid_environment_name(): Or(
+                    {
+                        Optional("additional_address_list"): list,
+                        Optional("allowed_methods"): list,
+                        Optional("cached_methods"): list,
+                        Optional("cdn_compress"): bool,
+                        Optional("cdn_domains_list"): dict,
+                        Optional("cdn_geo_locations"): list,
+                        Optional("cdn_geo_restriction_type"): str,
+                        Optional("cdn_logging_bucket"): str,
+                        Optional("cdn_logging_bucket_prefix"): str,
+                        Optional("cdn_timeout_seconds"): int,
+                        Optional("default_waf"): str,
+                        Optional("domain_prefix"): str,
+                        Optional("enable_logging"): bool,
+                        Optional("env_root"): str,
+                        Optional("forwarded_values_forward"): str,
+                        Optional("forwarded_values_headers"): list,
+                        Optional("forwarded_values_query_string"): bool,
+                        Optional("origin_protocol_policy"): str,
+                        Optional("origin_ssl_protocols"): list,
+                        Optional("slack_alert_channel_alb_secret_rotation"): str,
+                        Optional("viewer_certificate_minimum_protocol_version"): str,
+                        Optional("viewer_certificate_ssl_support_method"): str,
+                        Optional("viewer_protocol_policy"): str,
+                        Optional("cache_policy"): dict({str: _valid_alb_cache_policy}),
+                        Optional("origin_request_policy"): dict({str: {}}),
+                        Optional("paths"): dict({str: _valid_alb_paths_definition}),
+                    },
+                    None,
+                )
+            },
+        }
+
+    @staticmethod
+    def __codebase_pipelines_schema() -> list[dict]:
+        return [
+            {
+                "name": str,
+                "repository": str,
+                Optional("additional_ecr_repository"): str,
+                Optional("deploy_repository_branch"): str,
+                "services": list[str],
+                "pipelines": [
+                    Or(
+                        {
+                            "name": str,
+                            "branch": PlatformConfigSchema.__valid_branch_name(),
+                            "environments": [
+                                {
+                                    "name": str,
+                                    Optional("requires_approval"): bool,
+                                }
+                            ],
+                        },
+                        {
+                            "name": str,
+                            "tag": bool,
+                            "environments": [
+                                {
+                                    "name": str,
+                                    Optional("requires_approval"): bool,
+                                }
+                            ],
+                        },
+                    ),
+                ],
+            },
+        ]
+
+    @staticmethod
+    def __default_versions_schema() -> dict:
+        return {
+            Optional("terraform-platform-modules"): str,
+            Optional("platform-helper"): str,
+        }
+
+    @staticmethod
+    def __environments_schema() -> dict:
+        _valid_environment_specific_version_overrides = {
+            Optional("terraform-platform-modules"): str,
+        }
+
+        return {
+            str: Or(
+                None,
+                {
+                    Optional("accounts"): {
+                        "deploy": {
+                            "name": str,
+                            "id": str,
+                        },
+                        "dns": {
+                            "name": str,
+                            "id": str,
+                        },
+                    },
+                    # Todo: requires_approval is no longer relevant since we don't have AWS Copilot manage environment pipelines
+                    Optional("requires_approval"): bool,
+                    Optional("versions"): _valid_environment_specific_version_overrides,
+                    Optional("vpc"): str,
+                },
+            )
+        }
+
+    @staticmethod
+    def __environment_pipelines_schema() -> dict:
+        _valid_environment_pipeline_specific_version_overrides = {
+            Optional("platform-helper"): str,
+        }
+
+        return {
+            str: {
+                Optional("account"): str,
+                Optional("branch", default="main"): PlatformConfigSchema.__valid_branch_name(),
+                Optional("pipeline_to_trigger"): str,
+                Optional("versions"): _valid_environment_pipeline_specific_version_overrides,
+                "slack_channel": str,
+                "trigger_on_push": bool,
+                "environments": {
+                    str: Or(
+                        None,
+                        {
+                            Optional("accounts"): {
+                                "deploy": {
+                                    "name": str,
+                                    "id": str,
+                                },
+                                "dns": {
+                                    "name": str,
+                                    "id": str,
+                                },
+                            },
+                            Optional("requires_approval"): bool,
+                            Optional(
+                                "versions"
+                            ): _valid_environment_pipeline_specific_version_overrides,
+                            Optional("vpc"): str,
+                        },
+                    )
+                },
+            }
+        }
+
+    @staticmethod
+    def __monitoring_schema() -> dict:
+        return {
+            "type": "monitoring",
+            Optional("environments"): {
+                PlatformConfigSchema.__valid_environment_name(): {
+                    Optional("enable_ops_center"): bool,
+                }
+            },
+        }
+
+    @staticmethod
+    def __opensearch_schema() -> dict:
+        # Todo: Move to OpenSearch provider?
+        _valid_opensearch_plans = Or(
+            "tiny",
+            "small",
+            "small-ha",
+            "medium",
+            "medium-ha",
+            "large",
+            "large-ha",
+            "x-large",
+            "x-large-ha",
+        )
+
+        return {
+            "type": "opensearch",
+            Optional("environments"): {
+                PlatformConfigSchema.__valid_environment_name(): {
+                    Optional("engine"): str,
+                    Optional("deletion_policy"): PlatformConfigSchema.__valid_deletion_policy(),
+                    Optional("plan"): _valid_opensearch_plans,
+                    Optional("volume_size"): int,
+                    Optional("ebs_throughput"): int,
+                    Optional("ebs_volume_type"): str,
+                    Optional("instance"): str,
+                    Optional("instances"): int,
+                    Optional("master"): bool,
+                    Optional("es_app_log_retention_in_days"): int,
+                    Optional("index_slow_log_retention_in_days"): int,
+                    Optional("audit_log_retention_in_days"): int,
+                    Optional("search_slow_log_retention_in_days"): int,
+                    Optional("password_special_characters"): str,
+                    Optional("urlencode_password"): bool,
+                }
+            },
+        }
+
+    @staticmethod
+    def __postgres_schema() -> dict:
+        # Todo: Move to Postgres provider?
+        _valid_postgres_plans = Or(
+            "tiny",
+            "small",
+            "small-ha",
+            "small-high-io",
+            "medium",
+            "medium-ha",
+            "medium-high-io",
+            "large",
+            "large-ha",
+            "large-high-io",
+            "x-large",
+            "x-large-ha",
+            "x-large-high-io",
+            "2x-large",
+            "2x-large-ha",
+            "2x-large-high-io",
+            "4x-large",
+            "4x-large-ha",
+            "4x-large-high-io",
+        )
+
+        # Todo: Move to Postgres provider?
+        _valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2")
+
+        _valid_postgres_database_copy = {
+            "from": PlatformConfigSchema.__valid_environment_name(),
+            "to": PlatformConfigSchema.__valid_environment_name(),
+            Optional("from_account"): str,
+            Optional("to_account"): str,
+            Optional("pipeline"): {Optional("schedule"): str},
+        }
+
+        return {
+            "type": "postgres",
+            "version": (Or(int, float)),
+            Optional("deletion_policy"): PlatformConfigSchema.__valid_postgres_deletion_policy(),
+            Optional("environments"): {
+                PlatformConfigSchema.__valid_environment_name(): {
+                    Optional("plan"): _valid_postgres_plans,
+                    Optional("volume_size"): PlatformConfigSchema.is_integer_between(20, 10000),
+                    Optional("iops"): PlatformConfigSchema.is_integer_between(1000, 9950),
+                    Optional("snapshot_id"): str,
+                    Optional(
+                        "deletion_policy"
+                    ): PlatformConfigSchema.__valid_postgres_deletion_policy(),
+                    Optional("deletion_protection"): bool,
+                    Optional("multi_az"): bool,
+                    Optional("storage_type"): _valid_postgres_storage_types,
+                    Optional("backup_retention_days"): PlatformConfigSchema.is_integer_between(
+                        1, 35
+                    ),
+                }
+            },
+            Optional("database_copy"): [_valid_postgres_database_copy],
+            Optional("objects"): [
+                {
+                    "key": str,
+                    Optional("body"): str,
+                }
+            ],
+        }
+
+    @staticmethod
+    def __prometheus_policy_schema() -> dict:
+        return {
+            "type": "prometheus-policy",
+            Optional("services"): Or("__all__", [str]),
+            Optional("environments"): {
+                PlatformConfigSchema.__valid_environment_name(): {
+                    "role_arn": str,
+                }
+            },
+        }
+
+    @staticmethod
+    def __redis_schema() -> dict:
+        # Todo: Move to Redis provider?
+        _valid_redis_plans = Or(
+            "micro",
+            "micro-ha",
+            "tiny",
+            "tiny-ha",
+            "small",
+            "small-ha",
+            "medium",
+            "medium-ha",
+            "large",
+            "large-ha",
+            "x-large",
+            "x-large-ha",
+        )
+
+        return {
+            "type": "redis",
+            Optional("environments"): {
+                PlatformConfigSchema.__valid_environment_name(): {
+                    Optional("plan"): _valid_redis_plans,
+                    Optional("engine"): str,
+                    Optional("replicas"): PlatformConfigSchema.is_integer_between(0, 5),
+                    Optional("deletion_policy"): PlatformConfigSchema.__valid_deletion_policy(),
+                    Optional("apply_immediately"): bool,
+                    Optional("automatic_failover_enabled"): bool,
+                    Optional("instance"): str,
+                    Optional("multi_az_enabled"): bool,
+                }
+            },
+        }
+
+    @staticmethod
+    def valid_s3_bucket_name(name: str):
+        # Todo: This is a public method because that's what the tests expect. Perhaps it belongs in an S3 provider?
+        errors = []
+        if not (2 < len(name) < 64):
+            errors.append("Length must be between 3 and 63 characters inclusive.")
+
+        if not re.match(r"^[a-z0-9].*[a-z0-9]$", name):
+            errors.append("Names must start and end with 0-9 or a-z.")
+
+        if not re.match(r"^[a-z0-9.-]*$", name):
+            errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.")
+
+        if ".." in name:
+            errors.append("Names cannot contain two adjacent periods.")
+
+        try:
+            ipaddress.ip_address(name)
+            errors.append("Names cannot be IP addresses.")
+        except ValueError:
+            pass
+
+        for prefix in ("xn--", "sthree-"):
+            if name.startswith(prefix):
+                errors.append(f"Names cannot be prefixed '{prefix}'.")
+
+        for suffix in ("-s3alias", "--ol-s3"):
+            if name.endswith(suffix):
+                errors.append(f"Names cannot be suffixed '{suffix}'.")
+
+        if errors:
+            # Todo: Raise suitable PlatformException?
+            raise SchemaError(
+                "Bucket name '{}' is invalid:\n{}".format(name, "\n".join(f" {e}" for e in errors))
+            )
+
+        return True
+
+    @staticmethod
+    def __valid_s3_base_definition() -> dict:
+        def _valid_s3_bucket_arn(key):
+            return Regex(
+                r"^arn:aws:s3::.*",
+                error=f"{key} must contain a valid ARN for an S3 bucket",
+            )
+
+        _valid_s3_data_migration = {
+            "import": {
+                Optional("source_kms_key_arn"): PlatformConfigSchema.__valid_kms_key_arn(
+                    "source_kms_key_arn"
+                ),
+                "source_bucket_arn": _valid_s3_bucket_arn("source_bucket_arn"),
+                "worker_role_arn": PlatformConfigSchema.__valid_iam_role_arn("worker_role_arn"),
+            },
+        }
+
+        _valid_s3_bucket_retention_policy = Or(
+            None,
+            {
+                "mode": Or("GOVERNANCE", "COMPLIANCE"),
+                Or("days", "years", only_one=True): int,
+            },
+        )
+
+        _valid_s3_bucket_lifecycle_rule = {
+            Optional("filter_prefix"): str,
+            "expiration_days": int,
+            "enabled": bool,
+        }
+
+        _valid_s3_bucket_external_role_access = {
+            "role_arn": PlatformConfigSchema.__valid_iam_role_arn("role_arn"),
+            "read": bool,
+            "write": bool,
+            "cyber_sign_off_by": PlatformConfigSchema.__valid_dbt_email_address(
+                "cyber_sign_off_by"
+            ),
+        }
+
+        _valid_s3_bucket_external_role_access_name = Regex(
+            r"^([a-z][a-zA-Z0-9_-]*)$",
+            error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hyphen or underscore",
+        )
+
+        return dict(
+            {
+                Optional("readonly"): bool,
+                Optional("serve_static_content"): bool,
+                Optional("serve_static_param_name"): str,
+                Optional("services"): Or("__all__", [str]),
+                Optional("environments"): {
+                    PlatformConfigSchema.__valid_environment_name(): {
+                        "bucket_name": PlatformConfigSchema.valid_s3_bucket_name,
+                        Optional("deletion_policy"): PlatformConfigSchema.__valid_deletion_policy(),
+                        Optional("retention_policy"): _valid_s3_bucket_retention_policy,
+                        Optional("versioning"): bool,
+                        Optional("lifecycle_rules"): [_valid_s3_bucket_lifecycle_rule],
+                        Optional("data_migration"): _valid_s3_data_migration,
+                        Optional("external_role_access"): {
+                            PlatformConfigSchema.__valid_schema_key(): _valid_s3_bucket_external_role_access
+                        },
+                        Optional("cross_environment_service_access"): {
+                            PlatformConfigSchema.__valid_schema_key(): {
+                                "application": str,
+                                "environment": PlatformConfigSchema.__valid_environment_name(),
+                                "account": str,
+                                "service": str,
+                                "read": bool,
+                                "write": bool,
+                                "cyber_sign_off_by": PlatformConfigSchema.__valid_dbt_email_address(
+                                    "cyber_sign_off_by"
+                                ),
+                            }
+                        },
+                    },
+                },
+            }
+        )
+
+    @staticmethod
+    def __s3_bucket_schema() -> dict:
+        return PlatformConfigSchema.__valid_s3_base_definition() | {
+            "type": "s3",
+            Optional("objects"): [
+                {"key": str, Optional("body"): str, Optional("content_type"): str}
+            ],
+        }
+
+    @staticmethod
+    def __s3_bucket_policy_schema() -> dict:
+        return PlatformConfigSchema.__valid_s3_base_definition() | {"type": "s3-policy"}
+
+    @staticmethod
+    def string_matching_regex(regex_pattern: str) -> Callable:
+        # Todo: Public for the unit tests; not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
+        def validate(string):
+            if not re.match(regex_pattern, string):
+                # Todo: Raise suitable PlatformException?
+                raise SchemaError(
+                    f"String '{string}' does not match the required pattern '{regex_pattern}'."
+                )
+            return string
+
+        return validate
+
+    @staticmethod
+    def is_integer_between(lower_limit, upper_limit) -> Callable:
+        # Todo: Public for the unit tests; not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
+        def validate(value):
+            if isinstance(value, int) and lower_limit <= value <= upper_limit:
+                return True
+            # Todo: Raise suitable PlatformException?
+            raise SchemaError(f"should be an integer between {lower_limit} and {upper_limit}")
+
+        return validate
+
+    @staticmethod
+    def __valid_schema_key() -> Regex:
+        return Regex(
+            r"^([a-z][a-zA-Z0-9_-]*|\*)$",
+            error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore",
+        )
+
+    @staticmethod
+    def __valid_branch_name() -> Callable:
+        # Todo: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format
+        return PlatformConfigSchema.string_matching_regex(r"^((?!\*).)*(\*)?$")
+
+    @staticmethod
+    def __valid_deletion_policy() -> Or:
+        return Or("Delete", "Retain")
+
+    @staticmethod
+    def __valid_postgres_deletion_policy() -> Or:
+        return Or("Delete", "Retain", "Snapshot")
+
+    @staticmethod
+    def __valid_environment_name() -> Regex:
+        return Regex(
+            r"^([a-z][a-zA-Z0-9]*|\*)$",
+            error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment",
+            # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason.
+        )
+
+    @staticmethod
+    def __valid_kms_key_arn(key) -> Regex:
+        return Regex(
+            r"^arn:aws:kms:.*:\d{12}:(key|alias).*",
+            error=f"{key} must contain a valid ARN for a KMS key",
+        )
+
+    @staticmethod
+    def __valid_iam_role_arn(key) -> Regex:
+        return Regex(
+            r"^arn:aws:iam::\d{12}:role/.*",
+            error=f"{key} must contain a valid ARN for an IAM role",
+        )
+
+    @staticmethod
+    def __valid_dbt_email_address(key) -> Regex:
+        return Regex(
+            r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$",
+            error=f"{key} must contain a valid DBT email address",
+        )
+
+    @staticmethod
+    def __no_configuration_required_schema(schema_type) -> Schema:
+        return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])})
+
+
+class ConditionalOpensSearchSchema(Schema):
+    # Todo: Move to OpenSearch provider?
+    _valid_opensearch_min_volume_size: int = 10
+
+    # Todo: Move to OpenSearch provider?
+    _valid_opensearch_max_volume_size: dict = {
+        "tiny": 100,
+        "small": 200,
+        "small-ha": 200,
+        "medium": 512,
+        "medium-ha": 512,
+        "large": 1000,
+        "large-ha": 1000,
+        "x-large": 1500,
+        "x-large-ha": 1500,
+    }
+
+    def validate(self, data, _is_conditional_schema=True) -> Schema:
+        data = super(ConditionalOpensSearchSchema, self).validate(
+            data, _is_conditional_schema=False
+        )
+        if _is_conditional_schema:
+            default_plan = None
+            default_volume_size = None
+
+            default_environment_config = data["environments"].get(
+                "*", data["environments"].get("default", None)
+            )
+            if default_environment_config:
+                default_plan = default_environment_config.get("plan", None)
+                default_volume_size = default_environment_config.get("volume_size", None)
+
+            for env in data["environments"]:
+                volume_size = data["environments"][env].get("volume_size", default_volume_size)
+                plan = data["environments"][env].get("plan", default_plan)
+
+                if volume_size:
+                    if not plan:
+                        # Todo: Raise suitable PlatformException?
+                        raise SchemaError("Missing key: 'plan'")
+
+                    if volume_size < self._valid_opensearch_min_volume_size:
+                        # Todo: Raise suitable PlatformException?
+                        raise SchemaError(
+                            f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {self._valid_opensearch_min_volume_size}"
+                        )
+
+                    for key in self._valid_opensearch_max_volume_size:
+                        if (
+                            plan == key
+                            and not volume_size <= self._valid_opensearch_max_volume_size[key]
+                        ):
+                            # Todo: Raise suitable PlatformException?
+                            raise SchemaError(
+                                f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {self._valid_opensearch_min_volume_size} and {self._valid_opensearch_max_volume_size[key]} for plan {plan}"
+                            )
+
+        return data
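
To make the new module's behaviour concrete, the following is a minimal usage sketch; it is not part of the package. The config values, the "dev" environment, and the "demo-app" application name are illustrative assumptions; only PlatformConfigSchema, its public helpers, and the schema library's SchemaError come from the diff above.

# A minimal sketch (assumed usage, not shipped with the package) exercising
# the new schema module directly. Requires the `schema` library, which the
# module above imports.
from schema import SchemaError

from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema

# Hypothetical platform config: "application" is the only mandatory top-level key.
config = {
    "application": "demo-app",
    "environments": {"dev": None},  # environment bodies may be empty (None)
    "extensions": {
        "cache": {
            "type": "redis",
            "environments": {"dev": {"plan": "small", "replicas": 2}},
        }
    },
}

validated = PlatformConfigSchema.schema().validate(config)
print(validated["legacy_project"])  # False; Optional(..., default=False) is filled in

# Public helpers raise schema.SchemaError on bad input, e.g. a reserved bucket prefix:
try:
    PlatformConfigSchema.valid_s3_bucket_name("xn--not-allowed")
except SchemaError as error:
    print(error)  # lists every S3 bucket-name rule the value failed

# The conditional OpenSearch schema also enforces the per-plan volume_size caps:
opensearch = PlatformConfigSchema.extension_schemas()["opensearch"]
try:
    opensearch.validate(
        {"type": "opensearch", "environments": {"dev": {"plan": "tiny", "volume_size": 512}}}
    )
except SchemaError as error:
    print(error)  # "tiny" plans are capped at 100 in _valid_opensearch_max_volume_size

Note that validators such as valid_s3_bucket_name and is_integer_between return True rather than a parsed value, so the schema library keeps the caller's original data on success; failures surface only as SchemaError.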