dbt-platform-helper 12.4.0__py3-none-any.whl → 12.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -67,14 +67,9 @@ def generate(terraform_platform_modules_version, deploy_branch):
     pipeline_config = load_and_validate_platform_config()
 
     has_codebase_pipelines = CODEBASE_PIPELINES_KEY in pipeline_config
-    has_legacy_environment_pipelines = ENVIRONMENTS_KEY in pipeline_config
     has_environment_pipelines = ENVIRONMENT_PIPELINES_KEY in pipeline_config
 
-    if (
-        not has_codebase_pipelines
-        and not has_legacy_environment_pipelines
-        and not has_environment_pipelines
-    ):
+    if not (has_codebase_pipelines or has_environment_pipelines):
         click.secho("No pipelines defined: nothing to do.", err=True, fg="yellow")
         return
 
@@ -178,29 +173,6 @@ def _generate_codebase_pipeline(
     )
 
 
-def _generate_copilot_environments_pipeline(
-    app_name, codestar_connection_arn, git_repo, configuration, base_path, pipelines_dir, templates
-):
-    makedirs(pipelines_dir / "environments/overrides", exist_ok=True)
-
-    template_data = {
-        "app_name": app_name,
-        "git_repo": git_repo,
-        "codestar_connection_arn": codestar_connection_arn,
-        "pipeline_environments": configuration,
-    }
-
-    _create_file_from_template(
-        base_path, "environments/buildspec.yml", pipelines_dir, template_data, templates
-    )
-    _create_file_from_template(
-        base_path, "environments/manifest.yml", pipelines_dir, template_data, templates
-    )
-    _create_file_from_template(
-        base_path, "environments/overrides/cfn.patches.yml", pipelines_dir, template_data, templates
-    )
-
-
 def _create_file_from_template(
     base_path, file_name, pipelines_dir, template_data, templates, template_name=None
 ):
@@ -1,7 +1,9 @@
+# Todo: Move to Config provider
 PLATFORM_CONFIG_FILE = "platform-config.yml"
+# Todo: Can we get rid of this yet?
 PLATFORM_HELPER_VERSION_FILE = ".platform-helper-version"
+# Todo: Move to ???
 DEFAULT_TERRAFORM_PLATFORM_MODULES_VERSION = "5"
-PLATFORM_HELPER_CACHE_FILE = ".platform-helper-config-cache.yml"
 
 # Keys
 CODEBASE_PIPELINES_KEY = "codebase_pipelines"
@@ -261,7 +261,7 @@ def add_maintenance_page(
     )
 
     click.secho(
-        f"\nUse a browser plugin to add `Bypass-Key` header with value {bypass_value} to your requests. For more detail, visit https://platform.readme.trade.gov.uk/activities/holding-and-maintenance-pages/",
+        f"\nUse a browser plugin to add `Bypass-Key` header with value {bypass_value} to your requests. For more detail, visit https://platform.readme.trade.gov.uk/next-steps/put-a-service-under-maintenance/",
         fg="green",
     )
 
@@ -0,0 +1,83 @@
+import os
+from datetime import datetime
+from pathlib import Path
+
+import yaml
+
+
+class CacheProvider:
+    def __init__(self):
+        self._cache_file = ".platform-helper-config-cache.yml"
+
+    def read_supported_versions_from_cache(self, resource_name):
+
+        platform_helper_config = self.__read_file_as_yaml(self._cache_file)
+
+        return platform_helper_config.get(resource_name).get("versions")
+
+    def update_cache(self, resource_name, supported_versions):
+
+        platform_helper_config = {}
+
+        if self.__cache_exists():
+            platform_helper_config = self.__read_file_as_yaml(self._cache_file)
+
+        cache_dict = {
+            resource_name: {
+                "versions": supported_versions,
+                "date-retrieved": datetime.now().strftime("%d-%m-%y %H:%M:%S"),
+            }
+        }
+
+        platform_helper_config.update(cache_dict)
+
+        self.__write_cache(platform_helper_config)
+
+    def cache_refresh_required(self, resource_name) -> bool:
+        """
+        Checks if the platform-helper should reach out to AWS to 'refresh' its
+        cached values.
+
+        An API call is needed if any of the following conditions are met:
+        1. No cache file (.platform-helper-config-cache.yml) exists.
+        2. The resource name (e.g. redis, opensearch) does not exist within the cache file.
+        3. The date-retrieved value of the cached data is older than the refresh interval, in this case 1 day.
+        """
+
+        if not self.__cache_exists():
+            return True
+
+        platform_helper_config = self.__read_file_as_yaml(self._cache_file)
+
+        if platform_helper_config.get(resource_name):
+            return self.__check_if_cached_datetime_is_greater_than_interval(
+                platform_helper_config[resource_name].get("date-retrieved"), 1
+            )
+
+        return True
+
+    @staticmethod
+    def __check_if_cached_datetime_is_greater_than_interval(date_retrieved, interval_in_days):
+
+        current_datetime = datetime.now()
+        cached_datetime = datetime.strptime(date_retrieved, "%d-%m-%y %H:%M:%S")
+        delta = current_datetime - cached_datetime
+
+        return delta.days > interval_in_days
+
+    # TODO - same applies here as below
+    @staticmethod
+    def __read_file_as_yaml(file_name):
+
+        return yaml.safe_load(Path(file_name).read_text())
+
+    # TODO - temporary fix for the unit test coverage issue; the plan is to separate out any YAML interaction methods into a separate 'yaml' provider.
+    # That should be done under a different sub-task, which will need to loop back to this provider so it uses the yaml provider instead.
+    def __write_cache(self, contents):
+
+        with open(self._cache_file, "w") as file:
+            file.write("# [!] This file is autogenerated via the platform-helper. Do not edit.\n")
+            yaml.dump(contents, file)
+
+    def __cache_exists(self):
+        return os.path.exists(self._cache_file)
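
For context, here is a minimal usage sketch of the new CacheProvider added in this release. The import path, the resource name and the fetch_supported_versions_from_aws() stub are illustrative assumptions only; they are not part of the diff above.

# Illustrative sketch - not part of the package diff.
from dbt_platform_helper.providers.cache import CacheProvider  # module path assumed, not shown in this diff


def fetch_supported_versions_from_aws(resource):
    # Stand-in for whatever lookup actually queries AWS for supported engine versions.
    return ["7.0", "7.1"]


cache = CacheProvider()
resource = "redis"

if cache.cache_refresh_required(resource):
    # Cache file missing, resource not yet cached, or data older than a day: refresh it.
    versions = fetch_supported_versions_from_aws(resource)
    cache.update_cache(resource, versions)  # writes .platform-helper-config-cache.yml
else:
    versions = cache.read_supported_versions_from_cache(resource)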