dbt-platform-helper 12.3.0__py3-none-any.whl → 12.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-platform-helper might be problematic; see the registry's advisory page for more details.

Files changed (40)
  1. dbt_platform_helper/COMMANDS.md +6 -1
  2. dbt_platform_helper/commands/codebase.py +1 -1
  3. dbt_platform_helper/commands/conduit.py +2 -2
  4. dbt_platform_helper/commands/config.py +4 -4
  5. dbt_platform_helper/commands/copilot.py +13 -15
  6. dbt_platform_helper/commands/database.py +17 -4
  7. dbt_platform_helper/commands/environment.py +3 -2
  8. dbt_platform_helper/commands/pipeline.py +1 -29
  9. dbt_platform_helper/constants.py +3 -1
  10. dbt_platform_helper/domain/codebase.py +23 -5
  11. dbt_platform_helper/domain/conduit.py +0 -6
  12. dbt_platform_helper/domain/database_copy.py +14 -13
  13. dbt_platform_helper/domain/maintenance_page.py +9 -9
  14. dbt_platform_helper/platform_exception.py +5 -0
  15. dbt_platform_helper/providers/aws.py +32 -0
  16. dbt_platform_helper/providers/cache.py +83 -0
  17. dbt_platform_helper/providers/cloudformation.py +8 -1
  18. dbt_platform_helper/providers/copilot.py +2 -5
  19. dbt_platform_helper/providers/ecs.py +19 -4
  20. dbt_platform_helper/providers/load_balancers.py +11 -5
  21. dbt_platform_helper/providers/platform_config_schema.py +605 -0
  22. dbt_platform_helper/providers/secrets.py +51 -10
  23. dbt_platform_helper/providers/validation.py +19 -0
  24. dbt_platform_helper/utils/application.py +14 -2
  25. dbt_platform_helper/utils/arn_parser.py +1 -1
  26. dbt_platform_helper/utils/aws.py +22 -21
  27. dbt_platform_helper/utils/files.py +0 -70
  28. dbt_platform_helper/utils/git.py +2 -2
  29. dbt_platform_helper/utils/validation.py +3 -551
  30. dbt_platform_helper/utils/versioning.py +8 -8
  31. {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/METADATA +1 -1
  32. {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/RECORD +35 -35
  33. dbt_platform_helper/addons-template-map.yml +0 -29
  34. dbt_platform_helper/exceptions.py +0 -147
  35. dbt_platform_helper/templates/pipelines/environments/buildspec.yml +0 -80
  36. dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -48
  37. dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +0 -21
  38. {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/LICENSE +0 -0
  39. {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/WHEEL +0 -0
  40. {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/entry_points.txt +0 -0
@@ -1,14 +1,9 @@
1
- import ipaddress
2
1
  import os
3
2
  import re
4
3
  from pathlib import Path
5
4
 
6
5
  import click
7
6
  import yaml
8
- from schema import Optional
9
- from schema import Or
10
- from schema import Regex
11
- from schema import Schema
12
7
  from schema import SchemaError
13
8
  from yaml.parser import ParserError
14
9
  from yamllint import config
@@ -18,66 +13,14 @@ from dbt_platform_helper.constants import CODEBASE_PIPELINES_KEY
18
13
  from dbt_platform_helper.constants import ENVIRONMENTS_KEY
19
14
  from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
20
15
  from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
16
+ from dbt_platform_helper.providers.platform_config_schema import EXTENSION_SCHEMAS
17
+ from dbt_platform_helper.providers.platform_config_schema import PLATFORM_CONFIG_SCHEMA
21
18
  from dbt_platform_helper.utils.aws import get_supported_opensearch_versions
22
19
  from dbt_platform_helper.utils.aws import get_supported_redis_versions
23
20
  from dbt_platform_helper.utils.files import apply_environment_defaults
24
21
  from dbt_platform_helper.utils.messages import abort_with_error
25
22
 
26
23
 
27
- def validate_string(regex_pattern: str):
28
- def validator(string):
29
- if not re.match(regex_pattern, string):
30
- raise SchemaError(
31
- f"String '{string}' does not match the required pattern '{regex_pattern}'. For more details on valid string patterns see: https://aws.github.io/copilot-cli/docs/manifest/lb-web-service/"
32
- )
33
- return string
34
-
35
- return validator
36
-
37
-
38
- S3_BUCKET_NAME_ERROR_TEMPLATE = "Bucket name '{}' is invalid:\n{}"
39
- AVAILABILITY_UNCERTAIN_TEMPLATE = (
40
- "Warning: Could not determine the availability of bucket name '{}'."
41
- )
42
- BUCKET_NAME_IN_USE_TEMPLATE = "Warning: Bucket name '{}' is already in use. Check your AWS accounts to see if this is a problem."
43
-
44
-
45
- def validate_s3_bucket_name(name: str):
46
- errors = []
47
- if not (2 < len(name) < 64):
48
- errors.append("Length must be between 3 and 63 characters inclusive.")
49
-
50
- if not re.match(r"^[a-z0-9].*[a-z0-9]$", name):
51
- errors.append("Names must start and end with 0-9 or a-z.")
52
-
53
- if not re.match(r"^[a-z0-9.-]*$", name):
54
- errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.")
55
-
56
- if ".." in name:
57
- errors.append("Names cannot contain two adjacent periods.")
58
-
59
- try:
60
- ipaddress.ip_address(name)
61
- errors.append("Names cannot be IP addresses.")
62
- except ValueError:
63
- pass
64
-
65
- for prefix in ("xn--", "sthree-"):
66
- if name.startswith(prefix):
67
- errors.append(f"Names cannot be prefixed '{prefix}'.")
68
-
69
- for suffix in ("-s3alias", "--ol-s3"):
70
- if name.endswith(suffix):
71
- errors.append(f"Names cannot be suffixed '{suffix}'.")
72
-
73
- if errors:
74
- raise SchemaError(
75
- S3_BUCKET_NAME_ERROR_TEMPLATE.format(name, "\n".join(f" {e}" for e in errors))
76
- )
77
-
78
- return True
79
-
80
-
81
24
  def validate_addons(addons: dict):
82
25
  """
83
26
  Validate the addons file and return a dictionary of addon: error message.
@@ -90,7 +33,7 @@ def validate_addons(addons: dict):
90
33
  if not addon_type:
91
34
  errors[addon_name] = f"Missing addon type in addon '{addon_name}'"
92
35
  continue
93
- schema = SCHEMA_MAP.get(addon_type, None)
36
+ schema = EXTENSION_SCHEMAS.get(addon_type, None)
94
37
  if not schema:
95
38
  errors[addon_name] = (
96
39
  f"Unsupported addon type '{addon_type}' in addon '{addon_name}'"
@@ -116,15 +59,6 @@ def validate_addons(addons: dict):
116
59
  return errors
117
60
 
118
61
 
119
- def int_between(lower, upper):
120
- def is_between(value):
121
- if isinstance(value, int) and lower <= value <= upper:
122
- return True
123
- raise SchemaError(f"should be an integer between {lower} and {upper}")
124
-
125
- return is_between
126
-
127
-
128
62
  def float_between_with_halfstep(lower, upper):
129
63
  def is_between(value):
130
64
  is_number = isinstance(value, int) or isinstance(value, float)
@@ -137,420 +71,6 @@ def float_between_with_halfstep(lower, upper):
137
71
  return is_between
138
72
 
139
73
 
140
- ENV_NAME = Regex(
141
- r"^([a-z][a-zA-Z0-9]*|\*)$",
142
- error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment",
143
- # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason.
144
- )
145
-
146
- range_validator = validate_string(r"^\d+-\d+$")
147
- seconds_validator = validate_string(r"^\d+s$")
148
- branch_wildcard_validator = validate_string(r"^((?!\*).)*(\*)?$")
149
-
150
- NUMBER = Or(int, float)
151
- DELETION_POLICY = Or("Delete", "Retain")
152
- DB_DELETION_POLICY = Or("Delete", "Retain", "Snapshot")
153
- DELETION_PROTECTION = bool
154
-
155
- REDIS_PLANS = Or(
156
- "micro",
157
- "micro-ha",
158
- "tiny",
159
- "tiny-ha",
160
- "small",
161
- "small-ha",
162
- "medium",
163
- "medium-ha",
164
- "large",
165
- "large-ha",
166
- "x-large",
167
- "x-large-ha",
168
- )
169
-
170
- REDIS_ENGINE_VERSIONS = str
171
-
172
- REDIS_DEFINITION = {
173
- "type": "redis",
174
- Optional("environments"): {
175
- ENV_NAME: {
176
- Optional("plan"): REDIS_PLANS,
177
- Optional("engine"): REDIS_ENGINE_VERSIONS,
178
- Optional("replicas"): int_between(0, 5),
179
- Optional("deletion_policy"): DELETION_POLICY,
180
- Optional("apply_immediately"): bool,
181
- Optional("automatic_failover_enabled"): bool,
182
- Optional("instance"): str,
183
- Optional("multi_az_enabled"): bool,
184
- }
185
- },
186
- }
187
-
188
- POSTGRES_PLANS = Or(
189
- "tiny",
190
- "small",
191
- "small-ha",
192
- "small-high-io",
193
- "medium",
194
- "medium-ha",
195
- "medium-high-io",
196
- "large",
197
- "large-ha",
198
- "large-high-io",
199
- "x-large",
200
- "x-large-ha",
201
- "x-large-high-io",
202
- )
203
- POSTGRES_STORAGE_TYPES = Or("gp2", "gp3", "io1", "io2")
204
-
205
- RETENTION_POLICY = Or(
206
- None,
207
- {
208
- "mode": Or("GOVERNANCE", "COMPLIANCE"),
209
- Or("days", "years", only_one=True): int,
210
- },
211
- )
212
-
213
- DATABASE_COPY = {
214
- "from": ENV_NAME,
215
- "to": ENV_NAME,
216
- Optional("from_account"): str,
217
- Optional("to_account"): str,
218
- }
219
-
220
- POSTGRES_DEFINITION = {
221
- "type": "postgres",
222
- "version": NUMBER,
223
- Optional("deletion_policy"): DB_DELETION_POLICY,
224
- Optional("environments"): {
225
- ENV_NAME: {
226
- Optional("plan"): POSTGRES_PLANS,
227
- Optional("volume_size"): int_between(20, 10000),
228
- Optional("iops"): int_between(1000, 9950),
229
- Optional("snapshot_id"): str,
230
- Optional("deletion_policy"): DB_DELETION_POLICY,
231
- Optional("deletion_protection"): DELETION_PROTECTION,
232
- Optional("multi_az"): bool,
233
- Optional("storage_type"): POSTGRES_STORAGE_TYPES,
234
- Optional("backup_retention_days"): int_between(1, 35),
235
- }
236
- },
237
- Optional("database_copy"): [DATABASE_COPY],
238
- Optional("objects"): [
239
- {
240
- "key": str,
241
- Optional("body"): str,
242
- }
243
- ],
244
- }
245
-
246
- LIFECYCLE_RULE = {
247
- Optional("filter_prefix"): str,
248
- "expiration_days": int,
249
- "enabled": bool,
250
- }
251
-
252
-
253
- def kms_key_arn_regex(key):
254
- return Regex(
255
- r"^arn:aws:kms:.*:\d{12}:(key|alias).*",
256
- error=f"{key} must contain a valid ARN for a KMS key",
257
- )
258
-
259
-
260
- def s3_bucket_arn_regex(key):
261
- return Regex(
262
- r"^arn:aws:s3::.*",
263
- error=f"{key} must contain a valid ARN for an S3 bucket",
264
- )
265
-
266
-
267
- def iam_role_arn_regex(key):
268
- return Regex(
269
- r"^arn:aws:iam::\d{12}:role/.*",
270
- error=f"{key} must contain a valid ARN for an IAM role",
271
- )
272
-
273
-
274
- def dbt_email_address_regex(key):
275
- return Regex(
276
- r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$",
277
- error=f"{key} must contain a valid DBT email address",
278
- )
279
-
280
-
281
- EXTERNAL_ROLE_ACCESS = {
282
- "role_arn": iam_role_arn_regex("role_arn"),
283
- "read": bool,
284
- "write": bool,
285
- "cyber_sign_off_by": dbt_email_address_regex("cyber_sign_off_by"),
286
- }
287
-
288
- EXTERNAL_ROLE_ACCESS_NAME = Regex(
289
- r"^([a-z][a-zA-Z0-9_-]*)$",
290
- error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hypen or underscore",
291
- )
292
-
293
- DATA_IMPORT = {
294
- Optional("source_kms_key_arn"): kms_key_arn_regex("source_kms_key_arn"),
295
- "source_bucket_arn": s3_bucket_arn_regex("source_bucket_arn"),
296
- "worker_role_arn": iam_role_arn_regex("worker_role_arn"),
297
- }
298
-
299
- DATA_MIGRATION = {
300
- "import": DATA_IMPORT,
301
- }
302
-
303
- S3_BASE = {
304
- Optional("readonly"): bool,
305
- Optional("serve_static_content"): bool,
306
- Optional("services"): Or("__all__", [str]),
307
- Optional("environments"): {
308
- ENV_NAME: {
309
- "bucket_name": validate_s3_bucket_name,
310
- Optional("deletion_policy"): DELETION_POLICY,
311
- Optional("retention_policy"): RETENTION_POLICY,
312
- Optional("versioning"): bool,
313
- Optional("lifecycle_rules"): [LIFECYCLE_RULE],
314
- Optional("data_migration"): DATA_MIGRATION,
315
- Optional("external_role_access"): {EXTERNAL_ROLE_ACCESS_NAME: EXTERNAL_ROLE_ACCESS},
316
- },
317
- },
318
- }
319
-
320
- S3_POLICY_DEFINITION = dict(S3_BASE)
321
- S3_POLICY_DEFINITION.update({"type": "s3-policy"})
322
-
323
- S3_DEFINITION = dict(S3_BASE)
324
- S3_DEFINITION.update(
325
- {
326
- "type": "s3",
327
- Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}],
328
- }
329
- )
330
-
331
- MONITORING_DEFINITION = {
332
- "type": "monitoring",
333
- Optional("environments"): {
334
- ENV_NAME: {
335
- Optional("enable_ops_center"): bool,
336
- }
337
- },
338
- }
339
-
340
- OPENSEARCH_PLANS = Or(
341
- "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha"
342
- )
343
- OPENSEARCH_ENGINE_VERSIONS = str
344
- OPENSEARCH_MIN_VOLUME_SIZE = 10
345
- OPENSEARCH_MAX_VOLUME_SIZE = {
346
- "tiny": 100,
347
- "small": 200,
348
- "small-ha": 200,
349
- "medium": 512,
350
- "medium-ha": 512,
351
- "large": 1000,
352
- "large-ha": 1000,
353
- "x-large": 1500,
354
- "x-large-ha": 1500,
355
- }
356
-
357
- OPENSEARCH_DEFINITION = {
358
- "type": "opensearch",
359
- Optional("environments"): {
360
- ENV_NAME: {
361
- Optional("engine"): OPENSEARCH_ENGINE_VERSIONS,
362
- Optional("deletion_policy"): DELETION_POLICY,
363
- Optional("plan"): OPENSEARCH_PLANS,
364
- Optional("volume_size"): int,
365
- Optional("ebs_throughput"): int,
366
- Optional("ebs_volume_type"): str,
367
- Optional("instance"): str,
368
- Optional("instances"): int,
369
- Optional("master"): bool,
370
- Optional("es_app_log_retention_in_days"): int,
371
- Optional("index_slow_log_retention_in_days"): int,
372
- Optional("audit_log_retention_in_days"): int,
373
- Optional("search_slow_log_retention_in_days"): int,
374
- Optional("password_special_characters"): str,
375
- Optional("urlencode_password"): bool,
376
- }
377
- },
378
- }
379
-
380
- CACHE_POLICY_DEFINITION = {
381
- "min_ttl": int,
382
- "max_ttl": int,
383
- "default_ttl": int,
384
- "cookies_config": Or("none", "whitelist", "allExcept", "all"),
385
- "header": Or("none", "whitelist"),
386
- "query_string_behavior": Or("none", "whitelist", "allExcept", "all"),
387
- Optional("cookie_list"): list,
388
- Optional("headers_list"): list,
389
- Optional("cache_policy_query_strings"): list,
390
- }
391
-
392
- PATHS_DEFINITION = {
393
- Optional("default"): {
394
- "cache": str,
395
- "request": str,
396
- },
397
- Optional("additional"): list[
398
- {
399
- "path": str,
400
- "cache": str,
401
- "request": str,
402
- }
403
- ],
404
- }
405
-
406
- ALB_DEFINITION = {
407
- "type": "alb",
408
- Optional("environments"): {
409
- ENV_NAME: Or(
410
- {
411
- Optional("additional_address_list"): list,
412
- Optional("allowed_methods"): list,
413
- Optional("cached_methods"): list,
414
- Optional("cdn_compress"): bool,
415
- Optional("cdn_domains_list"): dict,
416
- Optional("cdn_geo_locations"): list,
417
- Optional("cdn_geo_restriction_type"): str,
418
- Optional("cdn_logging_bucket"): str,
419
- Optional("cdn_logging_bucket_prefix"): str,
420
- Optional("cdn_timeout_seconds"): int,
421
- Optional("default_waf"): str,
422
- Optional("domain_prefix"): str,
423
- Optional("enable_logging"): bool,
424
- Optional("env_root"): str,
425
- Optional("forwarded_values_forward"): str,
426
- Optional("forwarded_values_headers"): list,
427
- Optional("forwarded_values_query_string"): bool,
428
- Optional("origin_protocol_policy"): str,
429
- Optional("origin_ssl_protocols"): list,
430
- Optional("slack_alert_channel_alb_secret_rotation"): str,
431
- Optional("viewer_certificate_minimum_protocol_version"): str,
432
- Optional("viewer_certificate_ssl_support_method"): str,
433
- Optional("viewer_protocol_policy"): str,
434
- Optional("cache_policy"): dict({str: CACHE_POLICY_DEFINITION}),
435
- Optional("origin_request_policy"): dict({str: {}}),
436
- Optional("paths"): dict({str: PATHS_DEFINITION}),
437
- },
438
- None,
439
- )
440
- },
441
- }
442
-
443
- PROMETHEUS_POLICY_DEFINITION = {
444
- "type": "prometheus-policy",
445
- Optional("services"): Or("__all__", [str]),
446
- Optional("environments"): {
447
- ENV_NAME: {
448
- "role_arn": str,
449
- }
450
- },
451
- }
452
-
453
- _DEFAULT_VERSIONS_DEFINITION = {
454
- Optional("terraform-platform-modules"): str,
455
- Optional("platform-helper"): str,
456
- }
457
- _ENVIRONMENTS_VERSIONS_OVERRIDES = {
458
- Optional("terraform-platform-modules"): str,
459
- }
460
- _PIPELINE_VERSIONS_OVERRIDES = {
461
- Optional("platform-helper"): str,
462
- }
463
-
464
- _ENVIRONMENTS_PARAMS = {
465
- Optional("accounts"): {
466
- "deploy": {
467
- "name": str,
468
- "id": str,
469
- },
470
- "dns": {
471
- "name": str,
472
- "id": str,
473
- },
474
- },
475
- Optional("requires_approval"): bool,
476
- Optional("versions"): _ENVIRONMENTS_VERSIONS_OVERRIDES,
477
- Optional("vpc"): str,
478
- }
479
-
480
- ENVIRONMENTS_DEFINITION = {str: Or(None, _ENVIRONMENTS_PARAMS)}
481
-
482
- CODEBASE_PIPELINES_DEFINITION = [
483
- {
484
- "name": str,
485
- "repository": str,
486
- Optional("additional_ecr_repository"): str,
487
- Optional("deploy_repository_branch"): str,
488
- "services": list[str],
489
- "pipelines": [
490
- Or(
491
- {
492
- "name": str,
493
- "branch": branch_wildcard_validator,
494
- "environments": [
495
- {
496
- "name": str,
497
- Optional("requires_approval"): bool,
498
- }
499
- ],
500
- },
501
- {
502
- "name": str,
503
- "tag": bool,
504
- "environments": [
505
- {
506
- "name": str,
507
- Optional("requires_approval"): bool,
508
- }
509
- ],
510
- },
511
- ),
512
- ],
513
- },
514
- ]
515
-
516
- ENVIRONMENT_PIPELINES_DEFINITION = {
517
- str: {
518
- Optional("account"): str,
519
- Optional("branch", default="main"): str,
520
- Optional("pipeline_to_trigger"): str,
521
- Optional("versions"): _PIPELINE_VERSIONS_OVERRIDES,
522
- "slack_channel": str,
523
- "trigger_on_push": bool,
524
- "environments": {str: Or(None, _ENVIRONMENTS_PARAMS)},
525
- }
526
- }
527
-
528
- PLATFORM_CONFIG_SCHEMA = Schema(
529
- {
530
- # The following line is for the AWS Copilot version, will be removed under DBTP-1002
531
- "application": str,
532
- Optional("legacy_project", default=False): bool,
533
- Optional("default_versions"): _DEFAULT_VERSIONS_DEFINITION,
534
- Optional("accounts"): list[str],
535
- Optional("environments"): ENVIRONMENTS_DEFINITION,
536
- Optional("codebase_pipelines"): CODEBASE_PIPELINES_DEFINITION,
537
- Optional("extensions"): {
538
- str: Or(
539
- REDIS_DEFINITION,
540
- POSTGRES_DEFINITION,
541
- S3_DEFINITION,
542
- S3_POLICY_DEFINITION,
543
- MONITORING_DEFINITION,
544
- OPENSEARCH_DEFINITION,
545
- ALB_DEFINITION,
546
- PROMETHEUS_POLICY_DEFINITION,
547
- )
548
- },
549
- Optional("environment_pipelines"): ENVIRONMENT_PIPELINES_DEFINITION,
550
- }
551
- )
552
-
553
-
554
74
  def validate_platform_config(config):
555
75
  PLATFORM_CONFIG_SCHEMA.validate(config)
556
76
  enriched_config = apply_environment_defaults(config)
@@ -832,71 +352,3 @@ def config_file_check(path=PLATFORM_CONFIG_FILE):
832
352
  if errors:
833
353
  click.secho("\n".join(errors), bg="red", fg="white")
834
354
  exit(1)
835
-
836
-
837
- S3_SCHEMA = Schema(S3_DEFINITION)
838
- S3_POLICY_SCHEMA = Schema(S3_POLICY_DEFINITION)
839
- POSTGRES_SCHEMA = Schema(POSTGRES_DEFINITION)
840
- REDIS_SCHEMA = Schema(REDIS_DEFINITION)
841
-
842
-
843
- class ConditionalSchema(Schema):
844
- def validate(self, data, _is_conditional_schema=True):
845
- data = super(ConditionalSchema, self).validate(data, _is_conditional_schema=False)
846
- if _is_conditional_schema:
847
- default_plan = None
848
- default_volume_size = None
849
-
850
- default_environment_config = data["environments"].get(
851
- "*", data["environments"].get("default", None)
852
- )
853
- if default_environment_config:
854
- default_plan = default_environment_config.get("plan", None)
855
- default_volume_size = default_environment_config.get("volume_size", None)
856
-
857
- for env in data["environments"]:
858
- volume_size = data["environments"][env].get("volume_size", default_volume_size)
859
- plan = data["environments"][env].get("plan", default_plan)
860
-
861
- if volume_size:
862
- if not plan:
863
- raise SchemaError(f"Missing key: 'plan'")
864
-
865
- if volume_size < OPENSEARCH_MIN_VOLUME_SIZE:
866
- raise SchemaError(
867
- f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {OPENSEARCH_MIN_VOLUME_SIZE}"
868
- )
869
-
870
- for key in OPENSEARCH_MAX_VOLUME_SIZE:
871
- if plan == key and not volume_size <= OPENSEARCH_MAX_VOLUME_SIZE[key]:
872
- raise SchemaError(
873
- f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {OPENSEARCH_MIN_VOLUME_SIZE} and {OPENSEARCH_MAX_VOLUME_SIZE[key]} for plan {plan}"
874
- )
875
-
876
- return data
877
-
878
-
879
- OPENSEARCH_SCHEMA = ConditionalSchema(OPENSEARCH_DEFINITION)
880
- MONITORING_SCHEMA = Schema(MONITORING_DEFINITION)
881
- ALB_SCHEMA = Schema(ALB_DEFINITION)
882
- PROMETHEUS_POLICY_SCHEMA = Schema(PROMETHEUS_POLICY_DEFINITION)
883
-
884
-
885
- def no_param_schema(schema_type):
886
- return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])})
887
-
888
-
889
- SCHEMA_MAP = {
890
- "s3": S3_SCHEMA,
891
- "s3-policy": S3_POLICY_SCHEMA,
892
- "postgres": POSTGRES_SCHEMA,
893
- "redis": REDIS_SCHEMA,
894
- "opensearch": OPENSEARCH_SCHEMA,
895
- "monitoring": MONITORING_SCHEMA,
896
- "appconfig-ipfilter": no_param_schema("appconfig-ipfilter"),
897
- "subscription-filter": no_param_schema("subscription-filter"),
898
- "vpc": no_param_schema("vpc"),
899
- "xray": no_param_schema("xray"),
900
- "alb": ALB_SCHEMA,
901
- "prometheus-policy": PROMETHEUS_POLICY_SCHEMA,
902
- }
@@ -13,9 +13,9 @@ import requests
13
13
 
14
14
  from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
15
15
  from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
16
- from dbt_platform_helper.exceptions import IncompatibleMajorVersion
17
- from dbt_platform_helper.exceptions import IncompatibleMinorVersion
18
- from dbt_platform_helper.exceptions import ValidationException
16
+ from dbt_platform_helper.providers.validation import IncompatibleMajorVersionException
17
+ from dbt_platform_helper.providers.validation import IncompatibleMinorVersionException
18
+ from dbt_platform_helper.providers.validation import ValidationException
19
19
  from dbt_platform_helper.utils.platform_config import load_unvalidated_config_file
20
20
 
21
21
  VersionTuple = Optional[Tuple[int, int, int]]
@@ -198,13 +198,13 @@ def validate_version_compatibility(
198
198
  if (app_major == 0 and check_major == 0) and (
199
199
  app_minor != check_minor or app_patch != check_patch
200
200
  ):
201
- raise IncompatibleMajorVersion(app_version_as_string, check_version_as_string)
201
+ raise IncompatibleMajorVersionException(app_version_as_string, check_version_as_string)
202
202
 
203
203
  if app_major != check_major:
204
- raise IncompatibleMajorVersion(app_version_as_string, check_version_as_string)
204
+ raise IncompatibleMajorVersionException(app_version_as_string, check_version_as_string)
205
205
 
206
206
  if app_minor != check_minor:
207
- raise IncompatibleMinorVersion(app_version_as_string, check_version_as_string)
207
+ raise IncompatibleMinorVersionException(app_version_as_string, check_version_as_string)
208
208
 
209
209
 
210
210
  def check_version_on_file_compatibility(
@@ -248,9 +248,9 @@ def check_platform_helper_version_needs_update():
248
248
  )
249
249
  try:
250
250
  validate_version_compatibility(local_version, latest_release)
251
- except IncompatibleMajorVersion:
251
+ except IncompatibleMajorVersionException:
252
252
  click.secho(message, fg="red")
253
- except IncompatibleMinorVersion:
253
+ except IncompatibleMinorVersionException:
254
254
  click.secho(message, fg="yellow")
255
255
 
256
256
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbt-platform-helper
3
- Version: 12.3.0
3
+ Version: 12.4.1
4
4
  Summary: Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot.
5
5
  License: MIT
6
6
  Author: Department for Business and Trade Platform Team