dbt-platform-helper 12.4.0__py3-none-any.whl → 12.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
@@ -1,6 +1,4 @@
- import os
  from copy import deepcopy
- from datetime import datetime
  from os import makedirs
  from pathlib import Path

@@ -9,8 +7,6 @@ import yaml
  from jinja2 import Environment
  from jinja2 import FileSystemLoader

- from dbt_platform_helper.constants import PLATFORM_HELPER_CACHE_FILE
-

  def to_yaml(value):
      return yaml.dump(value, sort_keys=False)
@@ -106,69 +102,3 @@ def apply_environment_defaults(config):
      enriched_config["environments"] = defaulted_envs

      return enriched_config
-
-
- def read_supported_versions_from_cache(resource_name):
-
-     platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
-
-     return platform_helper_config.get(resource_name).get("versions")
-
-
- def write_to_cache(resource_name, supported_versions):
-
-     platform_helper_config = {}
-
-     if os.path.exists(PLATFORM_HELPER_CACHE_FILE):
-         platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
-
-     cache_dict = {
-         resource_name: {
-             "versions": supported_versions,
-             "date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
-         }
-     }
-
-     platform_helper_config.update(cache_dict)
-
-     with open(PLATFORM_HELPER_CACHE_FILE, "w") as file:
-         file.write("# [!] This file is autogenerated via the platform-helper. Do not edit.\n")
-         yaml.dump(platform_helper_config, file)
-
-
- def cache_refresh_required(resource_name) -> bool:
-     """
-     Checks if the platform-helper should reach out to AWS to 'refresh' its
-     cached values.
-
-     An API call is needed if any of the following conditions are met:
-         1. No cache file (.platform-helper-config.yml) exists.
-         2. The resource name (e.g. redis, opensearch) does not exist within the cache file.
-         3. The date-retrieved value of the cached data is > than a time interval. In this case 1 day.
-     """
-
-     if not os.path.exists(PLATFORM_HELPER_CACHE_FILE):
-         return True
-
-     platform_helper_config = read_file_as_yaml(PLATFORM_HELPER_CACHE_FILE)
-
-     if platform_helper_config.get(resource_name):
-         return check_if_cached_datetime_is_greater_than_interval(
-             platform_helper_config[resource_name].get("date-retrieved"), 1
-         )
-
-     return True
-
-
- def check_if_cached_datetime_is_greater_than_interval(date_retrieved, interval_in_days):
-
-     current_datetime = datetime.now()
-     cached_datetime = datetime.strptime(date_retrieved, "%d-%m-%y %H:%M:%S")
-     delta = current_datetime - cached_datetime
-
-     return delta.days > interval_in_days
-
-
- def read_file_as_yaml(file_name):
-
-     return yaml.safe_load(Path(file_name).read_text())
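
The hunks above remove the supported-versions cache helpers (and their os/datetime/PLATFORM_HELPER_CACHE_FILE imports) from this module. For readers tracing the change, here is a minimal, self-contained sketch of the write/refresh round-trip those helpers implemented, reconstructed from the removed lines; the `.platform-helper-config.yml` filename comes from the removed docstring, and the resource name and version list in the usage example are illustrative only.

    # Sketch only: reconstructed from the helpers removed above, not 12.4.1 code.
    import os
    from datetime import datetime
    from pathlib import Path

    import yaml

    PLATFORM_HELPER_CACHE_FILE = ".platform-helper-config.yml"  # name per the removed docstring


    def write_to_cache(resource_name, supported_versions):
        # Merge the new entry into any existing cache file, stamping the retrieval time.
        cache = {}
        if os.path.exists(PLATFORM_HELPER_CACHE_FILE):
            cache = yaml.safe_load(Path(PLATFORM_HELPER_CACHE_FILE).read_text()) or {}
        cache[resource_name] = {
            "versions": supported_versions,
            "date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
        }
        with open(PLATFORM_HELPER_CACHE_FILE, "w") as file:
            file.write("# [!] This file is autogenerated via the platform-helper. Do not edit.\n")
            yaml.dump(cache, file)


    def cache_refresh_required(resource_name) -> bool:
        # An AWS call is needed if the cache file or the resource entry is missing,
        # or if the cached entry is more than one day old.
        if not os.path.exists(PLATFORM_HELPER_CACHE_FILE):
            return True
        cache = yaml.safe_load(Path(PLATFORM_HELPER_CACHE_FILE).read_text()) or {}
        entry = cache.get(resource_name)
        if not entry:
            return True
        cached_at = datetime.strptime(entry["date-retrieved"], "%d-%m-%y %H:%M:%S")
        return (datetime.now() - cached_at).days > 1


    # Illustrative usage: only reach out to AWS when the cache is stale.
    if cache_refresh_required("redis"):
        write_to_cache("redis", ["6.2", "7.0", "7.1"])  # placeholder version list
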
@@ -1,14 +1,9 @@
- import ipaddress
  import os
  import re
  from pathlib import Path

  import click
  import yaml
- from schema import Optional
- from schema import Or
- from schema import Regex
- from schema import Schema
  from schema import SchemaError
  from yaml.parser import ParserError
  from yamllint import config
@@ -18,66 +13,14 @@ from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
  from dbt_platform_helper.constants import ENVIRONMENTS_KEY
  from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
  from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
+ from dbt_platform_helper.providers.platform_config_schema import EXTENSION_SCHEMAS
+ from dbt_platform_helper.providers.platform_config_schema import PLATFORM_CONFIG_SCHEMA
  from dbt_platform_helper.utils.aws import get_supported_opensearch_versions
  from dbt_platform_helper.utils.aws import get_supported_redis_versions
  from dbt_platform_helper.utils.files import apply_environment_defaults
  from dbt_platform_helper.utils.messages import abort_with_error


- def validate_string(regex_pattern: str):
-     def validator(string):
-         if not re.match(regex_pattern, string):
-             raise SchemaError(
-                 f"String '{string}' does not match the required pattern '{regex_pattern}'. For more details on valid string patterns see: https://aws.github.io/copilot-cli/docs/manifest/lb-web-service/"
-             )
-         return string
-
-     return validator
-
-
- S3_BUCKET_NAME_ERROR_TEMPLATE = "Bucket name '{}' is invalid:\n{}"
- AVAILABILITY_UNCERTAIN_TEMPLATE = (
-     "Warning: Could not determine the availability of bucket name '{}'."
- )
- BUCKET_NAME_IN_USE_TEMPLATE = "Warning: Bucket name '{}' is already in use. Check your AWS accounts to see if this is a problem."
-
-
- def validate_s3_bucket_name(name: str):
-     errors = []
-     if not (2 < len(name) < 64):
-         errors.append("Length must be between 3 and 63 characters inclusive.")
-
-     if not re.match(r"^[a-z0-9].*[a-z0-9]$", name):
-         errors.append("Names must start and end with 0-9 or a-z.")
-
-     if not re.match(r"^[a-z0-9.-]*$", name):
-         errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.")
-
-     if ".." in name:
-         errors.append("Names cannot contain two adjacent periods.")
-
-     try:
-         ipaddress.ip_address(name)
-         errors.append("Names cannot be IP addresses.")
-     except ValueError:
-         pass
-
-     for prefix in ("xn--", "sthree-"):
-         if name.startswith(prefix):
-             errors.append(f"Names cannot be prefixed '{prefix}'.")
-
-     for suffix in ("-s3alias", "--ol-s3"):
-         if name.endswith(suffix):
-             errors.append(f"Names cannot be suffixed '{suffix}'.")
-
-     if errors:
-         raise SchemaError(
-             S3_BUCKET_NAME_ERROR_TEMPLATE.format(name, "\n".join(f" {e}" for e in errors))
-         )
-
-     return True
-
-
  def validate_addons(addons: dict):
      """
      Validate the addons file and return a dictionary of addon: error message.
@@ -90,7 +33,7 @@ def validate_addons(addons: dict):
          if not addon_type:
              errors[addon_name] = f"Missing addon type in addon '{addon_name}'"
              continue
-         schema = SCHEMA_MAP.get(addon_type, None)
+         schema = EXTENSION_SCHEMAS.get(addon_type, None)
          if not schema:
              errors[addon_name] = (
                  f"Unsupported addon type '{addon_type}' in addon '{addon_name}'"
@@ -116,15 +59,6 @@ def validate_addons(addons: dict):
      return errors


- def int_between(lower, upper):
-     def is_between(value):
-         if isinstance(value, int) and lower <= value <= upper:
-             return True
-         raise SchemaError(f"should be an integer between {lower} and {upper}")
-
-     return is_between
-
-
  def float_between_with_halfstep(lower, upper):
      def is_between(value):
          is_number = isinstance(value, int) or isinstance(value, float)
@@ -137,434 +71,6 @@ def float_between_with_halfstep(lower, upper):
      return is_between


- ENV_NAME = Regex(
-     r"^([a-z][a-zA-Z0-9]*|\*)$",
-     error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment",
-     # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason.
- )
-
- range_validator = validate_string(r"^\d+-\d+$")
- seconds_validator = validate_string(r"^\d+s$")
- branch_wildcard_validator = validate_string(r"^((?!\*).)*(\*)?$")
-
- NUMBER = Or(int, float)
- DELETION_POLICY = Or("Delete", "Retain")
- DB_DELETION_POLICY = Or("Delete", "Retain", "Snapshot")
- DELETION_PROTECTION = bool
-
- REDIS_PLANS = Or(
-     "micro",
-     "micro-ha",
-     "tiny",
-     "tiny-ha",
-     "small",
-     "small-ha",
-     "medium",
-     "medium-ha",
-     "large",
-     "large-ha",
-     "x-large",
-     "x-large-ha",
- )
-
- REDIS_ENGINE_VERSIONS = str
-
- REDIS_DEFINITION = {
-     "type": "redis",
-     Optional("environments"): {
-         ENV_NAME: {
-             Optional("plan"): REDIS_PLANS,
-             Optional("engine"): REDIS_ENGINE_VERSIONS,
-             Optional("replicas"): int_between(0, 5),
-             Optional("deletion_policy"): DELETION_POLICY,
-             Optional("apply_immediately"): bool,
-             Optional("automatic_failover_enabled"): bool,
-             Optional("instance"): str,
-             Optional("multi_az_enabled"): bool,
-         }
-     },
- }
-
- POSTGRES_PLANS = Or(
-     "tiny",
-     "small",
-     "small-ha",
-     "small-high-io",
-     "medium",
-     "medium-ha",
-     "medium-high-io",
-     "large",
-     "large-ha",
-     "large-high-io",
-     "x-large",
-     "x-large-ha",
-     "x-large-high-io",
- )
- POSTGRES_STORAGE_TYPES = Or("gp2", "gp3", "io1", "io2")
-
- RETENTION_POLICY = Or(
-     None,
-     {
-         "mode": Or("GOVERNANCE", "COMPLIANCE"),
-         Or("days", "years", only_one=True): int,
-     },
- )
-
- DATABASE_COPY = {
-     "from": ENV_NAME,
-     "to": ENV_NAME,
-     Optional("from_account"): str,
-     Optional("to_account"): str,
-     Optional("pipeline"): {Optional("schedule"): str},
- }
-
- POSTGRES_DEFINITION = {
-     "type": "postgres",
-     "version": NUMBER,
-     Optional("deletion_policy"): DB_DELETION_POLICY,
-     Optional("environments"): {
-         ENV_NAME: {
-             Optional("plan"): POSTGRES_PLANS,
-             Optional("volume_size"): int_between(20, 10000),
-             Optional("iops"): int_between(1000, 9950),
-             Optional("snapshot_id"): str,
-             Optional("deletion_policy"): DB_DELETION_POLICY,
-             Optional("deletion_protection"): DELETION_PROTECTION,
-             Optional("multi_az"): bool,
-             Optional("storage_type"): POSTGRES_STORAGE_TYPES,
-             Optional("backup_retention_days"): int_between(1, 35),
-         }
-     },
-     Optional("database_copy"): [DATABASE_COPY],
-     Optional("objects"): [
-         {
-             "key": str,
-             Optional("body"): str,
-         }
-     ],
- }
-
- LIFECYCLE_RULE = {
-     Optional("filter_prefix"): str,
-     "expiration_days": int,
-     "enabled": bool,
- }
-
-
- def kms_key_arn_regex(key):
-     return Regex(
-         r"^arn:aws:kms:.*:\d{12}:(key|alias).*",
-         error=f"{key} must contain a valid ARN for a KMS key",
-     )
-
-
- def s3_bucket_arn_regex(key):
-     return Regex(
-         r"^arn:aws:s3::.*",
-         error=f"{key} must contain a valid ARN for an S3 bucket",
-     )
-
-
- def iam_role_arn_regex(key):
-     return Regex(
-         r"^arn:aws:iam::\d{12}:role/.*",
-         error=f"{key} must contain a valid ARN for an IAM role",
-     )
-
-
- def dbt_email_address_regex(key):
-     return Regex(
-         r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$",
-         error=f"{key} must contain a valid DBT email address",
-     )
-
-
- EXTERNAL_ROLE_ACCESS = {
-     "role_arn": iam_role_arn_regex("role_arn"),
-     "read": bool,
-     "write": bool,
-     "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"),
- }
-
- CROSS_ENVIRONMENT_SERVICE_ACCESS = {
-     "application": str,
-     "environment": ENV_NAME,
-     "account": str,
-     "service": str,
-     "read": bool,
-     "write": bool,
-     "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"),
- }
-
- LOWER_ALPHANUMERIC = Regex(
-     r"^([a-z][a-zA-Z0-9_-]*|\*)$",
-     error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore",
- )
-
- DATA_IMPORT = {
-     Optional("source_kms_key_arn"): kms_key_arn_regex("source_kms_key_arn"),
-     "source_bucket_arn": s3_bucket_arn_regex("source_bucket_arn"),
-     "worker_role_arn": iam_role_arn_regex("worker_role_arn"),
- }
-
- DATA_MIGRATION = {
-     "import": DATA_IMPORT,
- }
-
- S3_BASE = {
-     Optional("readonly"): bool,
-     Optional("serve_static_content"): bool,
-     Optional("services"): Or("__all__", [str]),
-     Optional("environments"): {
-         ENV_NAME: {
-             "bucket_name": validate_s3_bucket_name,
-             Optional("deletion_policy"): DELETION_POLICY,
-             Optional("retention_policy"): RETENTION_POLICY,
-             Optional("versioning"): bool,
-             Optional("lifecycle_rules"): [LIFECYCLE_RULE],
-             Optional("data_migration"): DATA_MIGRATION,
-             Optional("external_role_access"): {LOWER_ALPHANUMERIC: EXTERNAL_ROLE_ACCESS},
-             Optional("cross_environment_service_access"): {
-                 LOWER_ALPHANUMERIC: CROSS_ENVIRONMENT_SERVICE_ACCESS
-             },
-         },
-     },
- }
-
- S3_POLICY_DEFINITION = dict(S3_BASE)
- S3_POLICY_DEFINITION.update({"type": "s3-policy"})
-
- S3_DEFINITION = dict(S3_BASE)
- S3_DEFINITION.update(
-     {
-         "type": "s3",
-         Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}],
-     }
- )
-
- MONITORING_DEFINITION = {
-     "type": "monitoring",
-     Optional("environments"): {
-         ENV_NAME: {
-             Optional("enable_ops_center"): bool,
-         }
-     },
- }
-
- OPENSEARCH_PLANS = Or(
-     "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha"
- )
- OPENSEARCH_ENGINE_VERSIONS = str
- OPENSEARCH_MIN_VOLUME_SIZE = 10
- OPENSEARCH_MAX_VOLUME_SIZE = {
-     "tiny": 100,
-     "small": 200,
-     "small-ha": 200,
-     "medium": 512,
-     "medium-ha": 512,
-     "large": 1000,
-     "large-ha": 1000,
-     "x-large": 1500,
-     "x-large-ha": 1500,
- }
-
- OPENSEARCH_DEFINITION = {
-     "type": "opensearch",
-     Optional("environments"): {
-         ENV_NAME: {
-             Optional("engine"): OPENSEARCH_ENGINE_VERSIONS,
-             Optional("deletion_policy"): DELETION_POLICY,
-             Optional("plan"): OPENSEARCH_PLANS,
-             Optional("volume_size"): int,
-             Optional("ebs_throughput"): int,
-             Optional("ebs_volume_type"): str,
-             Optional("instance"): str,
-             Optional("instances"): int,
-             Optional("master"): bool,
-             Optional("es_app_log_retention_in_days"): int,
-             Optional("index_slow_log_retention_in_days"): int,
-             Optional("audit_log_retention_in_days"): int,
-             Optional("search_slow_log_retention_in_days"): int,
-             Optional("password_special_characters"): str,
-             Optional("urlencode_password"): bool,
-         }
-     },
- }
-
- CACHE_POLICY_DEFINITION = {
-     "min_ttl": int,
-     "max_ttl": int,
-     "default_ttl": int,
-     "cookies_config": Or("none", "whitelist", "allExcept", "all"),
-     "header": Or("none", "whitelist"),
-     "query_string_behavior": Or("none", "whitelist", "allExcept", "all"),
-     Optional("cookie_list"): list,
-     Optional("headers_list"): list,
-     Optional("cache_policy_query_strings"): list,
- }
-
- PATHS_DEFINITION = {
-     Optional("default"): {
-         "cache": str,
-         "request": str,
-     },
-     Optional("additional"): list[
-         {
-             "path": str,
-             "cache": str,
-             "request": str,
-         }
-     ],
- }
-
- ALB_DEFINITION = {
-     "type": "alb",
-     Optional("environments"): {
-         ENV_NAME: Or(
-             {
-                 Optional("additional_address_list"): list,
-                 Optional("allowed_methods"): list,
-                 Optional("cached_methods"): list,
-                 Optional("cdn_compress"): bool,
-                 Optional("cdn_domains_list"): dict,
-                 Optional("cdn_geo_locations"): list,
-                 Optional("cdn_geo_restriction_type"): str,
-                 Optional("cdn_logging_bucket"): str,
-                 Optional("cdn_logging_bucket_prefix"): str,
-                 Optional("cdn_timeout_seconds"): int,
-                 Optional("default_waf"): str,
-                 Optional("domain_prefix"): str,
-                 Optional("enable_logging"): bool,
-                 Optional("env_root"): str,
-                 Optional("forwarded_values_forward"): str,
-                 Optional("forwarded_values_headers"): list,
-                 Optional("forwarded_values_query_string"): bool,
-                 Optional("origin_protocol_policy"): str,
-                 Optional("origin_ssl_protocols"): list,
-                 Optional("slack_alert_channel_alb_secret_rotation"): str,
-                 Optional("viewer_certificate_minimum_protocol_version"): str,
-                 Optional("viewer_certificate_ssl_support_method"): str,
-                 Optional("viewer_protocol_policy"): str,
-                 Optional("cache_policy"): dict({str: CACHE_POLICY_DEFINITION}),
-                 Optional("origin_request_policy"): dict({str: {}}),
-                 Optional("paths"): dict({str: PATHS_DEFINITION}),
-             },
-             None,
-         )
-     },
- }
-
- PROMETHEUS_POLICY_DEFINITION = {
-     "type": "prometheus-policy",
-     Optional("services"): Or("__all__", [str]),
-     Optional("environments"): {
-         ENV_NAME: {
-             "role_arn": str,
-         }
-     },
- }
-
- _DEFAULT_VERSIONS_DEFINITION = {
-     Optional("terraform-platform-modules"): str,
-     Optional("platform-helper"): str,
- }
- _ENVIRONMENTS_VERSIONS_OVERRIDES = {
-     Optional("terraform-platform-modules"): str,
- }
- _PIPELINE_VERSIONS_OVERRIDES = {
-     Optional("platform-helper"): str,
- }
-
- _ENVIRONMENTS_PARAMS = {
-     Optional("accounts"): {
-         "deploy": {
-             "name": str,
-             "id": str,
-         },
-         "dns": {
-             "name": str,
-             "id": str,
-         },
-     },
-     Optional("requires_approval"): bool,
-     Optional("versions"): _ENVIRONMENTS_VERSIONS_OVERRIDES,
-     Optional("vpc"): str,
- }
-
- ENVIRONMENTS_DEFINITION = {str: Or(None, _ENVIRONMENTS_PARAMS)}
-
- CODEBASE_PIPELINES_DEFINITION = [
-     {
-         "name": str,
-         "repository": str,
-         Optional("additional_ecr_repository"): str,
-         Optional("deploy_repository_branch"): str,
-         "services": list[str],
-         "pipelines": [
-             Or(
-                 {
-                     "name": str,
-                     "branch": branch_wildcard_validator,
-                     "environments": [
-                         {
-                             "name": str,
-                             Optional("requires_approval"): bool,
-                         }
-                     ],
-                 },
-                 {
-                     "name": str,
-                     "tag": bool,
-                     "environments": [
-                         {
-                             "name": str,
-                             Optional("requires_approval"): bool,
-                         }
-                     ],
-                 },
-             ),
-         ],
-     },
- ]
-
- ENVIRONMENT_PIPELINES_DEFINITION = {
-     str: {
-         Optional("account"): str,
-         Optional("branch", default="main"): str,
-         Optional("pipeline_to_trigger"): str,
-         Optional("versions"): _PIPELINE_VERSIONS_OVERRIDES,
-         "slack_channel": str,
-         "trigger_on_push": bool,
-         "environments": {str: Or(None, _ENVIRONMENTS_PARAMS)},
-     }
- }
-
- PLATFORM_CONFIG_SCHEMA = Schema(
-     {
-         # The following line is for the AWS Copilot version, will be removed under DBTP-1002
-         "application": str,
-         Optional("legacy_project", default=False): bool,
-         Optional("default_versions"): _DEFAULT_VERSIONS_DEFINITION,
-         Optional("accounts"): list[str],
-         Optional("environments"): ENVIRONMENTS_DEFINITION,
-         Optional("codebase_pipelines"): CODEBASE_PIPELINES_DEFINITION,
-         Optional("extensions"): {
-             str: Or(
-                 REDIS_DEFINITION,
-                 POSTGRES_DEFINITION,
-                 S3_DEFINITION,
-                 S3_POLICY_DEFINITION,
-                 MONITORING_DEFINITION,
-                 OPENSEARCH_DEFINITION,
-                 ALB_DEFINITION,
-                 PROMETHEUS_POLICY_DEFINITION,
-             )
-         },
-         Optional("environment_pipelines"): ENVIRONMENT_PIPELINES_DEFINITION,
-     }
- )
-
-
  def validate_platform_config(config):
      PLATFORM_CONFIG_SCHEMA.validate(config)
      enriched_config = apply_environment_defaults(config)
@@ -846,71 +352,3 @@ def config_file_check(path=PLATFORM_CONFIG_FILE):
      if errors:
          click.secho("\n".join(errors), bg="red", fg="white")
          exit(1)
-
-
- S3_SCHEMA = Schema(S3_DEFINITION)
- S3_POLICY_SCHEMA = Schema(S3_POLICY_DEFINITION)
- POSTGRES_SCHEMA = Schema(POSTGRES_DEFINITION)
- REDIS_SCHEMA = Schema(REDIS_DEFINITION)
-
-
- class ConditionalSchema(Schema):
-     def validate(self, data, _is_conditional_schema=True):
-         data = super(ConditionalSchema, self).validate(data, _is_conditional_schema=False)
-         if _is_conditional_schema:
-             default_plan = None
-             default_volume_size = None
-
-             default_environment_config = data["environments"].get(
-                 "*", data["environments"].get("default", None)
-             )
-             if default_environment_config:
-                 default_plan = default_environment_config.get("plan", None)
-                 default_volume_size = default_environment_config.get("volume_size", None)
-
-             for env in data["environments"]:
-                 volume_size = data["environments"][env].get("volume_size", default_volume_size)
-                 plan = data["environments"][env].get("plan", default_plan)
-
-                 if volume_size:
-                     if not plan:
-                         raise SchemaError(f"Missing key: 'plan'")
-
-                     if volume_size < OPENSEARCH_MIN_VOLUME_SIZE:
-                         raise SchemaError(
-                             f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {OPENSEARCH_MIN_VOLUME_SIZE}"
-                         )
-
-                     for key in OPENSEARCH_MAX_VOLUME_SIZE:
-                         if plan == key and not volume_size <= OPENSEARCH_MAX_VOLUME_SIZE[key]:
-                             raise SchemaError(
-                                 f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {OPENSEARCH_MIN_VOLUME_SIZE} and {OPENSEARCH_MAX_VOLUME_SIZE[key]} for plan {plan}"
-                             )
-
-         return data
-
-
- OPENSEARCH_SCHEMA = ConditionalSchema(OPENSEARCH_DEFINITION)
- MONITORING_SCHEMA = Schema(MONITORING_DEFINITION)
- ALB_SCHEMA = Schema(ALB_DEFINITION)
- PROMETHEUS_POLICY_SCHEMA = Schema(PROMETHEUS_POLICY_DEFINITION)
-
-
- def no_param_schema(schema_type):
-     return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])})
-
-
- SCHEMA_MAP = {
-     "s3": S3_SCHEMA,
-     "s3-policy": S3_POLICY_SCHEMA,
-     "postgres": POSTGRES_SCHEMA,
-     "redis": REDIS_SCHEMA,
-     "opensearch": OPENSEARCH_SCHEMA,
-     "monitoring": MONITORING_SCHEMA,
-     "appconfig-ipfilter": no_param_schema("appconfig-ipfilter"),
-     "subscription-filter": no_param_schema("subscription-filter"),
-     "vpc": no_param_schema("vpc"),
-     "xray": no_param_schema("xray"),
-     "alb": ALB_SCHEMA,
-     "prometheus-policy": PROMETHEUS_POLICY_SCHEMA,
- }
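
The SCHEMA_MAP removed above is superseded by EXTENSION_SCHEMAS, imported near the top of this file's diff from dbt_platform_helper.providers.platform_config_schema (where PLATFORM_CONFIG_SCHEMA now also lives). As a minimal sketch of the lookup-and-validate pattern the updated validate_addons hunk uses; the addon definition below is illustrative only:

    # Sketch only: mirrors the updated lookup in the validate_addons hunk above.
    from schema import SchemaError

    from dbt_platform_helper.providers.platform_config_schema import EXTENSION_SCHEMAS

    addon = {"type": "redis", "environments": {"*": {"plan": "small"}}}  # illustrative addon

    schema = EXTENSION_SCHEMAS.get(addon["type"], None)  # the "+" line in the hunk above
    if not schema:
        print(f"Unsupported addon type '{addon['type']}'")
    else:
        try:
            # The mapped values are `schema` library Schema objects, as the removed
            # SCHEMA_MAP shows, so each supports .validate().
            schema.validate(addon)
        except SchemaError as ex:
            print(f"Validation error: {ex}")
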
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbt-platform-helper
- Version: 12.4.0
+ Version: 12.4.1
  Summary: Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot.
  License: MIT
  Author: Department for Business and Trade Platform Team
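
For completeness, the version bump recorded in this metadata hunk can be verified on an installed copy with the standard library's importlib.metadata; this is a generic check, not tooling shipped by the package:

    # Sketch: read the installed distribution's version from its metadata.
    from importlib.metadata import version

    print(version("dbt-platform-helper"))  # "12.4.1" once the new wheel is installed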