dbt-platform-helper 12.4.0-py3-none-any.whl → 12.4.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,605 @@
+ import ipaddress
+ import re
+
+ from schema import Optional
+ from schema import Or
+ from schema import Regex
+ from schema import Schema
+ from schema import SchemaError
+
+
+ def _string_matching_regex(regex_pattern: str):
+     def validate(string):
+         if not re.match(regex_pattern, string):
+             # Todo: Raise suitable PlatformException?
+             raise SchemaError(
+                 f"String '{string}' does not match the required pattern '{regex_pattern}'."
+             )
+         return string
+
+     return validate
+
+
+ def _is_integer_between(lower_limit, upper_limit):
+     def validate(value):
+         if isinstance(value, int) and lower_limit <= value <= upper_limit:
+             return True
+         # Todo: Raise suitable PlatformException?
+         raise SchemaError(f"should be an integer between {lower_limit} and {upper_limit}")
+
+     return validate
+
+
+ _valid_schema_key = Regex(
+     r"^([a-z][a-zA-Z0-9_-]*|\*)$",
+     error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore",
+ )
+
+ # Todo: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format
+ _valid_branch_name = _string_matching_regex(r"^((?!\*).)*(\*)?$")
+
+ _valid_deletion_policy = Or("Delete", "Retain")
+
+ _valid_postgres_deletion_policy = Or("Delete", "Retain", "Snapshot")
+
+ _valid_environment_name = Regex(
+     r"^([a-z][a-zA-Z0-9]*|\*)$",
+     error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment",
+     # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason.
+ )
+
+
+ def _valid_kms_key_arn(key):
+     return Regex(
+         r"^arn:aws:kms:.*:\d{12}:(key|alias).*",
+         error=f"{key} must contain a valid ARN for a KMS key",
+     )
+
+
+ def _valid_iam_role_arn(key):
+     return Regex(
+         r"^arn:aws:iam::\d{12}:role/.*",
+         error=f"{key} must contain a valid ARN for an IAM role",
+     )
+
+
+ def _valid_dbt_email_address(key):
+     return Regex(
+         r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$",
+         error=f"{key} must contain a valid DBT email address",
+     )
+
+
+ _cross_environment_service_access_schema = {
+     "application": str,
+     "environment": _valid_environment_name,
+     "account": str,
+     "service": str,
+     "read": bool,
+     "write": bool,
+     "cyber_sign_off_by": _valid_dbt_email_address("cyber_sign_off_by"),
+ }
+
+
+ def _no_configuration_required_schema(schema_type):
+     return Schema({"type": schema_type, Optional("services"): Or("__all__", [str])})
+
+
+ # Application load balancer....
+ _valid_alb_cache_policy = {
+     "min_ttl": int,
+     "max_ttl": int,
+     "default_ttl": int,
+     "cookies_config": Or("none", "whitelist", "allExcept", "all"),
+     "header": Or("none", "whitelist"),
+     "query_string_behavior": Or("none", "whitelist", "allExcept", "all"),
+     Optional("cookie_list"): list,
+     Optional("headers_list"): list,
+     Optional("cache_policy_query_strings"): list,
+ }
+
+ _valid_alb_paths_definition = {
+     Optional("default"): {
+         "cache": str,
+         "request": str,
+     },
+     Optional("additional"): list[
+         {
+             "path": str,
+             "cache": str,
+             "request": str,
+         }
+     ],
+ }
+
+ _alb_schema = {
+     "type": "alb",
+     Optional("environments"): {
+         _valid_environment_name: Or(
+             {
+                 Optional("additional_address_list"): list,
+                 Optional("allowed_methods"): list,
+                 Optional("cached_methods"): list,
+                 Optional("cdn_compress"): bool,
+                 Optional("cdn_domains_list"): dict,
+                 Optional("cdn_geo_locations"): list,
+                 Optional("cdn_geo_restriction_type"): str,
+                 Optional("cdn_logging_bucket"): str,
+                 Optional("cdn_logging_bucket_prefix"): str,
+                 Optional("cdn_timeout_seconds"): int,
+                 Optional("default_waf"): str,
+                 Optional("domain_prefix"): str,
+                 Optional("enable_logging"): bool,
+                 Optional("env_root"): str,
+                 Optional("forwarded_values_forward"): str,
+                 Optional("forwarded_values_headers"): list,
+                 Optional("forwarded_values_query_string"): bool,
+                 Optional("origin_protocol_policy"): str,
+                 Optional("origin_ssl_protocols"): list,
+                 Optional("slack_alert_channel_alb_secret_rotation"): str,
+                 Optional("viewer_certificate_minimum_protocol_version"): str,
+                 Optional("viewer_certificate_ssl_support_method"): str,
+                 Optional("viewer_protocol_policy"): str,
+                 Optional("cache_policy"): dict({str: _valid_alb_cache_policy}),
+                 Optional("origin_request_policy"): dict({str: {}}),
+                 Optional("paths"): dict({str: _valid_alb_paths_definition}),
+             },
+             None,
+         )
+     },
+ }
+
+ # Monitoring...
+ _monitoring_schema = {
+     "type": "monitoring",
+     Optional("environments"): {
+         _valid_environment_name: {
+             Optional("enable_ops_center"): bool,
+         }
+     },
+ }
+
+
+ # Opensearch...
+ class ConditionalOpensSearchSchema(Schema):
+     def validate(self, data, _is_conditional_schema=True):
+         data = super(ConditionalOpensSearchSchema, self).validate(
+             data, _is_conditional_schema=False
+         )
+         if _is_conditional_schema:
+             default_plan = None
+             default_volume_size = None
+
+             default_environment_config = data["environments"].get(
+                 "*", data["environments"].get("default", None)
+             )
+             if default_environment_config:
+                 default_plan = default_environment_config.get("plan", None)
+                 default_volume_size = default_environment_config.get("volume_size", None)
+
+             for env in data["environments"]:
+                 volume_size = data["environments"][env].get("volume_size", default_volume_size)
+                 plan = data["environments"][env].get("plan", default_plan)
+
+                 if volume_size:
+                     if not plan:
+                         # Todo: Raise suitable PlatformException?
+                         raise SchemaError(f"Missing key: 'plan'")
+
+                     if volume_size < _valid_opensearch_min_volume_size:
+                         # Todo: Raise suitable PlatformException?
+                         raise SchemaError(
+                             f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {_valid_opensearch_min_volume_size}"
+                         )
+
+                     for key in _valid_opensearch_max_volume_size:
+                         if (
+                             plan == key
+                             and not volume_size <= _valid_opensearch_max_volume_size[key]
+                         ):
+                             # Todo: Raise suitable PlatformException?
+                             raise SchemaError(
+                                 f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {_valid_opensearch_min_volume_size} and {_valid_opensearch_max_volume_size[key]} for plan {plan}"
+                             )
+
+         return data
+
+
+ # Todo: Move to OpenSearch provider?
+ _valid_opensearch_plans = Or(
+     "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha"
+ )
+ # Todo: Move to OpenSearch provider?
+ _valid_opensearch_min_volume_size = 10
+ # Todo: Move to OpenSearch provider?
+ _valid_opensearch_max_volume_size = {
+     "tiny": 100,
+     "small": 200,
+     "small-ha": 200,
+     "medium": 512,
+     "medium-ha": 512,
+     "large": 1000,
+     "large-ha": 1000,
+     "x-large": 1500,
+     "x-large-ha": 1500,
+ }
+
+ _opensearch_schema = {
+     "type": "opensearch",
+     Optional("environments"): {
+         _valid_environment_name: {
+             Optional("engine"): str,
+             Optional("deletion_policy"): _valid_deletion_policy,
+             Optional("plan"): _valid_opensearch_plans,
+             Optional("volume_size"): int,
+             Optional("ebs_throughput"): int,
+             Optional("ebs_volume_type"): str,
+             Optional("instance"): str,
+             Optional("instances"): int,
+             Optional("master"): bool,
+             Optional("es_app_log_retention_in_days"): int,
+             Optional("index_slow_log_retention_in_days"): int,
+             Optional("audit_log_retention_in_days"): int,
+             Optional("search_slow_log_retention_in_days"): int,
+             Optional("password_special_characters"): str,
+             Optional("urlencode_password"): bool,
+         }
+     },
+ }
+
+ # Prometheus...
+ _prometheus_policy_schema = {
+     "type": "prometheus-policy",
+     Optional("services"): Or("__all__", [str]),
+     Optional("environments"): {
+         _valid_environment_name: {
+             "role_arn": str,
+         }
+     },
+ }
+
+ # Postgres...
+ # Todo: Move to Postgres provider?
+ _valid_postgres_plans = Or(
+     "tiny",
+     "small",
+     "small-ha",
+     "small-high-io",
+     "medium",
+     "medium-ha",
+     "medium-high-io",
+     "large",
+     "large-ha",
+     "large-high-io",
+     "x-large",
+     "x-large-ha",
+     "x-large-high-io",
+ )
+
+ # Todo: Move to Postgres provider?
+ _valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2")
+
+ _valid_postgres_database_copy = {
+     "from": _valid_environment_name,
+     "to": _valid_environment_name,
+     Optional("from_account"): str,
+     Optional("to_account"): str,
+     Optional("pipeline"): {Optional("schedule"): str},
+ }
+
+ _postgres_schema = {
+     "type": "postgres",
+     "version": (Or(int, float)),
+     Optional("deletion_policy"): _valid_postgres_deletion_policy,
+     Optional("environments"): {
+         _valid_environment_name: {
+             Optional("plan"): _valid_postgres_plans,
+             Optional("volume_size"): _is_integer_between(20, 10000),
+             Optional("iops"): _is_integer_between(1000, 9950),
+             Optional("snapshot_id"): str,
+             Optional("deletion_policy"): _valid_postgres_deletion_policy,
+             Optional("deletion_protection"): bool,
+             Optional("multi_az"): bool,
+             Optional("storage_type"): _valid_postgres_storage_types,
+             Optional("backup_retention_days"): _is_integer_between(1, 35),
+         }
+     },
+     Optional("database_copy"): [_valid_postgres_database_copy],
+     Optional("objects"): [
+         {
+             "key": str,
+             Optional("body"): str,
+         }
+     ],
+ }
+
+ # Redis...
+ # Todo move to Redis provider?
+ _valid_redis_plans = Or(
+     "micro",
+     "micro-ha",
+     "tiny",
+     "tiny-ha",
+     "small",
+     "small-ha",
+     "medium",
+     "medium-ha",
+     "large",
+     "large-ha",
+     "x-large",
+     "x-large-ha",
+ )
+
+ _redis_schema = {
+     "type": "redis",
+     Optional("environments"): {
+         _valid_environment_name: {
+             Optional("plan"): _valid_redis_plans,
+             Optional("engine"): str,
+             Optional("replicas"): _is_integer_between(0, 5),
+             Optional("deletion_policy"): _valid_deletion_policy,
+             Optional("apply_immediately"): bool,
+             Optional("automatic_failover_enabled"): bool,
+             Optional("instance"): str,
+             Optional("multi_az_enabled"): bool,
+         }
+     },
+ }
+
+
+ # S3 Bucket...
+ def _valid_s3_bucket_name(name: str):
+     errors = []
+     if not (2 < len(name) < 64):
+         errors.append("Length must be between 3 and 63 characters inclusive.")
+
+     if not re.match(r"^[a-z0-9].*[a-z0-9]$", name):
+         errors.append("Names must start and end with 0-9 or a-z.")
+
+     if not re.match(r"^[a-z0-9.-]*$", name):
+         errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.")
+
+     if ".." in name:
+         errors.append("Names cannot contain two adjacent periods.")
+
+     try:
+         ipaddress.ip_address(name)
+         errors.append("Names cannot be IP addresses.")
+     except ValueError:
+         pass
+
+     for prefix in ("xn--", "sthree-"):
+         if name.startswith(prefix):
+             errors.append(f"Names cannot be prefixed '{prefix}'.")
+
+     for suffix in ("-s3alias", "--ol-s3"):
+         if name.endswith(suffix):
+             errors.append(f"Names cannot be suffixed '{suffix}'.")
+
+     if errors:
+         # Todo: Raise suitable PlatformException?
+         raise SchemaError(
+             "Bucket name '{}' is invalid:\n{}".format(name, "\n".join(f" {e}" for e in errors))
+         )
+
+     return True
+
+
+ def _valid_s3_bucket_arn(key):
+     return Regex(
+         r"^arn:aws:s3::.*",
+         error=f"{key} must contain a valid ARN for an S3 bucket",
+     )
+
+
+ _valid_s3_data_migration = {
+     "import": {
+         Optional("source_kms_key_arn"): _valid_kms_key_arn("source_kms_key_arn"),
+         "source_bucket_arn": _valid_s3_bucket_arn("source_bucket_arn"),
+         "worker_role_arn": _valid_iam_role_arn("worker_role_arn"),
+     },
+ }
+
+ _valid_s3_bucket_retention_policy = Or(
+     None,
+     {
+         "mode": Or("GOVERNANCE", "COMPLIANCE"),
+         Or("days", "years", only_one=True): int,
+     },
+ )
+
+ _valid_s3_bucket_lifecycle_rule = {
+     Optional("filter_prefix"): str,
+     "expiration_days": int,
+     "enabled": bool,
+ }
+
+ _valid_s3_bucket_external_role_access = {
+     "role_arn": _valid_iam_role_arn("role_arn"),
+     "read": bool,
+     "write": bool,
+     "cyber_sign_off_by": _valid_dbt_email_address("cyber_sign_off_by"),
+ }
+
+ _valid_s3_bucket_external_role_access_name = Regex(
+     r"^([a-z][a-zA-Z0-9_-]*)$",
+     error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hypen or underscore",
+ )
+
+ _valid_s3_base_definition = dict(
+     {
+         Optional("readonly"): bool,
+         Optional("serve_static_content"): bool,
+         Optional("serve_static_param_name"): str,
+         Optional("services"): Or("__all__", [str]),
+         Optional("environments"): {
+             _valid_environment_name: {
+                 "bucket_name": _valid_s3_bucket_name,
+                 Optional("deletion_policy"): _valid_deletion_policy,
+                 Optional("retention_policy"): _valid_s3_bucket_retention_policy,
+                 Optional("versioning"): bool,
+                 Optional("lifecycle_rules"): [_valid_s3_bucket_lifecycle_rule],
+                 Optional("data_migration"): _valid_s3_data_migration,
+                 Optional("external_role_access"): {
+                     _valid_schema_key: _valid_s3_bucket_external_role_access
+                 },
+                 Optional("cross_environment_service_access"): {
+                     _valid_schema_key: _cross_environment_service_access_schema
+                 },
+             },
+         },
+     }
+ )
+
+ _s3_bucket_schema = _valid_s3_base_definition | {
+     "type": "s3",
+     Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}],
+ }
+
+ _s3_bucket_policy_schema = _valid_s3_base_definition | {"type": "s3-policy"}
+
+ _default_versions_schema = {
+     Optional("terraform-platform-modules"): str,
+     Optional("platform-helper"): str,
+ }
+
+ _valid_environment_specific_version_overrides = {
+     Optional("terraform-platform-modules"): str,
+ }
+
+ _valid_pipeline_specific_version_overrides = {
+     Optional("platform-helper"): str,
+ }
+
+ _environments_schema = {
+     str: Or(
+         None,
+         {
+             Optional("accounts"): {
+                 "deploy": {
+                     "name": str,
+                     "id": str,
+                 },
+                 "dns": {
+                     "name": str,
+                     "id": str,
+                 },
+             },
+             # Todo: Is requires_approval relevant?
+             Optional("requires_approval"): bool,
+             Optional("versions"): _valid_environment_specific_version_overrides,
+             Optional("vpc"): str,
+         },
+     )
+ }
+
+ # Codebase pipelines...
+ _codebase_pipelines_schema = [
+     {
+         "name": str,
+         "repository": str,
+         Optional("additional_ecr_repository"): str,
+         Optional("deploy_repository_branch"): str,
+         "services": list[str],
+         "pipelines": [
+             Or(
+                 {
+                     "name": str,
+                     "branch": _valid_branch_name,
+                     "environments": [
+                         {
+                             "name": str,
+                             Optional("requires_approval"): bool,
+                         }
+                     ],
+                 },
+                 {
+                     "name": str,
+                     "tag": bool,
+                     "environments": [
+                         {
+                             "name": str,
+                             Optional("requires_approval"): bool,
+                         }
+                     ],
+                 },
+             ),
+         ],
+     },
+ ]
+
+ # Environment pipelines...
+ _environment_pipelines_schema = {
+     str: {
+         Optional("account"): str,
+         Optional("branch", default="main"): _valid_branch_name,
+         Optional("pipeline_to_trigger"): str,
+         Optional("versions"): _valid_pipeline_specific_version_overrides,
+         "slack_channel": str,
+         "trigger_on_push": bool,
+         "environments": {
+             str: Or(
+                 None,
+                 {
+                     Optional("accounts"): {
+                         "deploy": {
+                             "name": str,
+                             "id": str,
+                         },
+                         "dns": {
+                             "name": str,
+                             "id": str,
+                         },
+                     },
+                     Optional("requires_approval"): bool,
+                     Optional("versions"): _valid_environment_specific_version_overrides,
+                     Optional("vpc"): str,
+                 },
+             )
+         },
+     }
+ }
+
+
+ # Used outside this file by validate_platform_config()
+ PLATFORM_CONFIG_SCHEMA = Schema(
+     {
+         # The following line is for the AWS Copilot version, will be removed under DBTP-1002
+         "application": str,
+         Optional("legacy_project", default=False): bool,
+         Optional("default_versions"): _default_versions_schema,
+         Optional("accounts"): list[str],
+         Optional("environments"): _environments_schema,
+         Optional("codebase_pipelines"): _codebase_pipelines_schema,
+         Optional("environment_pipelines"): _environment_pipelines_schema,
+         Optional("extensions"): {
+             str: Or(
+                 _alb_schema,
+                 _monitoring_schema,
+                 _opensearch_schema,
+                 _postgres_schema,
+                 _prometheus_policy_schema,
+                 _redis_schema,
+                 _s3_bucket_schema,
+                 _s3_bucket_policy_schema,
+             )
+         },
+     }
+ )
+
+ # This is used outside this file by validate_addons()
+ EXTENSION_SCHEMAS = {
+     "alb": Schema(_alb_schema),
+     "appconfig-ipfilter": _no_configuration_required_schema("appconfig-ipfilter"),
+     "opensearch": ConditionalOpensSearchSchema(_opensearch_schema),
+     "postgres": Schema(_postgres_schema),
+     "prometheus-policy": Schema(_prometheus_policy_schema),
+     "redis": Schema(_redis_schema),
+     "s3": Schema(_s3_bucket_schema),
+     "s3-policy": Schema(_s3_bucket_policy_schema),
+     "subscription-filter": _no_configuration_required_schema("subscription-filter"),
+     # Todo: We think the next three are no longer relevant?
+     "monitoring": Schema(_monitoring_schema),
+     "vpc": _no_configuration_required_schema("vpc"),
+     "xray": _no_configuration_required_schema("xray"),
+ }
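
The exported PLATFORM_CONFIG_SCHEMA and EXTENSION_SCHEMAS objects above are ordinary schema.Schema instances, so callers such as validate_platform_config() and validate_addons() can check a parsed config dict with their validate() methods. A minimal sketch of that usage follows; the application name, environment names and the "my-redis" extension below are made-up example values, not part of the package:

    from schema import SchemaError

    # Hypothetical config dict, roughly as it might look after parsing platform-config.yml.
    example_config = {
        "application": "my-app",
        "environments": {"dev": None, "prod": None},
        "extensions": {
            "my-redis": {
                "type": "redis",
                "environments": {"*": {"plan": "small", "replicas": 2}},
            }
        },
    }

    try:
        # Whole-config validation, as used by validate_platform_config().
        PLATFORM_CONFIG_SCHEMA.validate(example_config)
        # Per-extension validation, as used by validate_addons().
        EXTENSION_SCHEMAS["redis"].validate(example_config["extensions"]["my-redis"])
    except SchemaError as error:
        print(f"Configuration is invalid: {error}")

Validation failures surface as schema.SchemaError, which is also what the helper validators and the conditional OpenSearch checks in the module raise.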
@@ -18,10 +18,8 @@ from dbt_platform_helper.providers.aws import AWSException
  from dbt_platform_helper.providers.aws import CopilotCodebaseNotFoundException
  from dbt_platform_helper.providers.aws import ImageNotFoundException
  from dbt_platform_helper.providers.aws import LogGroupNotFoundException
+ from dbt_platform_helper.providers.cache import CacheProvider
  from dbt_platform_helper.providers.validation import ValidationException
- from dbt_platform_helper.utils.files import cache_refresh_required
- from dbt_platform_helper.utils.files import read_supported_versions_from_cache
- from dbt_platform_helper.utils.files import write_to_cache

  SSM_BASE_PATH = "/copilot/{app}/{env}/secrets/"
  SSM_PATH = "/copilot/{app}/{env}/secrets/{name}"
@@ -362,9 +360,9 @@ def get_postgres_connection_data_updated_with_master_secret(session, parameter_n

  def get_supported_redis_versions():

-     if cache_refresh_required("redis"):
+     cache_provider = CacheProvider()

-         supported_versions = []
+     if cache_provider.cache_refresh_required("redis"):

          session = get_aws_session_or_abort()
          elasticache_client = session.client("elasticache")
@@ -378,19 +376,19 @@ def get_supported_redis_versions():
              for version in supported_versions_response["CacheEngineVersions"]
          ]

-         write_to_cache("redis", supported_versions)
+         cache_provider.update_cache("redis", supported_versions)

          return supported_versions

      else:
-         return read_supported_versions_from_cache("redis")
+         return cache_provider.read_supported_versions_from_cache("redis")


  def get_supported_opensearch_versions():

-     if cache_refresh_required("opensearch"):
+     cache_provider = CacheProvider()

-         supported_versions = []
+     if cache_provider.cache_refresh_required("opensearch"):

          session = get_aws_session_or_abort()
          opensearch_client = session.client("opensearch")
@@ -405,12 +403,12 @@ def get_supported_opensearch_versions():
              version.removeprefix("OpenSearch_") for version in opensearch_versions
          ]

-         write_to_cache("opensearch", supported_versions)
+         cache_provider.update_cache("opensearch", supported_versions)

          return supported_versions

      else:
-         return read_supported_versions_from_cache("opensearch")
+         return cache_provider.read_supported_versions_from_cache("opensearch")


  def get_connection_string(
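
These hunks replace the module-level cache helpers previously imported from dbt_platform_helper.utils.files with a CacheProvider instance from dbt_platform_helper.providers.cache, so the supported-version lookups only call the AWS APIs when the cached data is stale. The provider's implementation is not part of this diff; the sketch below is a hypothetical, file-backed stand-in for the same three-method interface (cache_refresh_required, update_cache, read_supported_versions_from_cache), with the class name, cache path and TTL invented purely for illustration:

    import json
    import os
    import time


    class FileBackedCache:
        """Illustrative file-backed cache with a TTL; not the package's actual CacheProvider."""

        def __init__(self, path=".platform-helper-cache.json", ttl_seconds=86400):
            self.path = path
            self.ttl_seconds = ttl_seconds

        def _load(self):
            # Return the whole cache file as a dict, or an empty dict if it doesn't exist yet.
            if not os.path.exists(self.path):
                return {}
            with open(self.path) as cache_file:
                return json.load(cache_file)

        def cache_refresh_required(self, resource):
            # Stale if the resource has never been cached or its entry is older than the TTL.
            entry = self._load().get(resource)
            return entry is None or (time.time() - entry["cached_at"]) > self.ttl_seconds

        def update_cache(self, resource, versions):
            # Overwrite the entry for this resource with fresh data and a new timestamp.
            data = self._load()
            data[resource] = {"versions": versions, "cached_at": time.time()}
            with open(self.path, "w") as cache_file:
                json.dump(data, cache_file)

        def read_supported_versions_from_cache(self, resource):
            return self._load().get(resource, {}).get("versions", [])

With an interface like this, get_supported_redis_versions() and get_supported_opensearch_versions() follow a read-through pattern: when the cache is stale they query AWS and call update_cache, otherwise they return the list from read_supported_versions_from_cache.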