dbt-platform-helper 13.1.0__py3-none-any.whl → 15.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. dbt_platform_helper/COMMANDS.md +107 -27
  2. dbt_platform_helper/commands/application.py +5 -6
  3. dbt_platform_helper/commands/codebase.py +31 -10
  4. dbt_platform_helper/commands/conduit.py +3 -5
  5. dbt_platform_helper/commands/config.py +20 -311
  6. dbt_platform_helper/commands/copilot.py +18 -391
  7. dbt_platform_helper/commands/database.py +17 -9
  8. dbt_platform_helper/commands/environment.py +20 -14
  9. dbt_platform_helper/commands/generate.py +0 -3
  10. dbt_platform_helper/commands/internal.py +140 -0
  11. dbt_platform_helper/commands/notify.py +58 -78
  12. dbt_platform_helper/commands/pipeline.py +23 -19
  13. dbt_platform_helper/commands/secrets.py +39 -93
  14. dbt_platform_helper/commands/version.py +7 -12
  15. dbt_platform_helper/constants.py +52 -7
  16. dbt_platform_helper/domain/codebase.py +89 -39
  17. dbt_platform_helper/domain/conduit.py +335 -76
  18. dbt_platform_helper/domain/config.py +381 -0
  19. dbt_platform_helper/domain/copilot.py +398 -0
  20. dbt_platform_helper/domain/copilot_environment.py +8 -8
  21. dbt_platform_helper/domain/database_copy.py +2 -2
  22. dbt_platform_helper/domain/maintenance_page.py +254 -430
  23. dbt_platform_helper/domain/notify.py +64 -0
  24. dbt_platform_helper/domain/pipelines.py +43 -35
  25. dbt_platform_helper/domain/plans.py +41 -0
  26. dbt_platform_helper/domain/secrets.py +279 -0
  27. dbt_platform_helper/domain/service.py +570 -0
  28. dbt_platform_helper/domain/terraform_environment.py +14 -13
  29. dbt_platform_helper/domain/update_alb_rules.py +412 -0
  30. dbt_platform_helper/domain/versioning.py +249 -0
  31. dbt_platform_helper/{providers → entities}/platform_config_schema.py +75 -82
  32. dbt_platform_helper/entities/semantic_version.py +83 -0
  33. dbt_platform_helper/entities/service.py +339 -0
  34. dbt_platform_helper/platform_exception.py +4 -0
  35. dbt_platform_helper/providers/autoscaling.py +24 -0
  36. dbt_platform_helper/providers/aws/__init__.py +0 -0
  37. dbt_platform_helper/providers/aws/exceptions.py +70 -0
  38. dbt_platform_helper/providers/aws/interfaces.py +13 -0
  39. dbt_platform_helper/providers/aws/opensearch.py +23 -0
  40. dbt_platform_helper/providers/aws/redis.py +21 -0
  41. dbt_platform_helper/providers/aws/sso_auth.py +75 -0
  42. dbt_platform_helper/providers/cache.py +40 -4
  43. dbt_platform_helper/providers/cloudformation.py +1 -1
  44. dbt_platform_helper/providers/config.py +137 -19
  45. dbt_platform_helper/providers/config_validator.py +112 -51
  46. dbt_platform_helper/providers/copilot.py +24 -16
  47. dbt_platform_helper/providers/ecr.py +89 -7
  48. dbt_platform_helper/providers/ecs.py +228 -36
  49. dbt_platform_helper/providers/environment_variable.py +24 -0
  50. dbt_platform_helper/providers/files.py +1 -1
  51. dbt_platform_helper/providers/io.py +36 -4
  52. dbt_platform_helper/providers/kms.py +22 -0
  53. dbt_platform_helper/providers/load_balancers.py +402 -42
  54. dbt_platform_helper/providers/logs.py +72 -0
  55. dbt_platform_helper/providers/parameter_store.py +134 -0
  56. dbt_platform_helper/providers/s3.py +21 -0
  57. dbt_platform_helper/providers/schema_migrations/__init__.py +0 -0
  58. dbt_platform_helper/providers/schema_migrations/schema_v0_to_v1_migration.py +43 -0
  59. dbt_platform_helper/providers/schema_migrator.py +77 -0
  60. dbt_platform_helper/providers/secrets.py +5 -5
  61. dbt_platform_helper/providers/slack_channel_notifier.py +62 -0
  62. dbt_platform_helper/providers/terraform_manifest.py +121 -19
  63. dbt_platform_helper/providers/version.py +106 -23
  64. dbt_platform_helper/providers/version_status.py +27 -0
  65. dbt_platform_helper/providers/vpc.py +36 -5
  66. dbt_platform_helper/providers/yaml_file.py +58 -2
  67. dbt_platform_helper/templates/environment-pipelines/main.tf +4 -3
  68. dbt_platform_helper/templates/svc/overrides/cfn.patches.yml +5 -0
  69. dbt_platform_helper/utilities/decorators.py +103 -0
  70. dbt_platform_helper/utils/application.py +119 -22
  71. dbt_platform_helper/utils/aws.py +39 -150
  72. dbt_platform_helper/utils/deep_merge.py +10 -0
  73. dbt_platform_helper/utils/git.py +1 -14
  74. dbt_platform_helper/utils/validation.py +1 -1
  75. {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/METADATA +11 -20
  76. dbt_platform_helper-15.16.0.dist-info/RECORD +118 -0
  77. {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/WHEEL +1 -1
  78. platform_helper.py +3 -1
  79. terraform/elasticache-redis/plans.yml +85 -0
  80. terraform/opensearch/plans.yml +71 -0
  81. terraform/postgres/plans.yml +128 -0
  82. dbt_platform_helper/addon-plans.yml +0 -224
  83. dbt_platform_helper/providers/aws.py +0 -37
  84. dbt_platform_helper/providers/opensearch.py +0 -36
  85. dbt_platform_helper/providers/redis.py +0 -34
  86. dbt_platform_helper/providers/semantic_version.py +0 -126
  87. dbt_platform_helper/templates/svc/manifest-backend.yml +0 -69
  88. dbt_platform_helper/templates/svc/manifest-public.yml +0 -109
  89. dbt_platform_helper/utils/cloudfoundry.py +0 -14
  90. dbt_platform_helper/utils/files.py +0 -53
  91. dbt_platform_helper/utils/manifests.py +0 -18
  92. dbt_platform_helper/utils/versioning.py +0 -238
  93. dbt_platform_helper-13.1.0.dist-info/RECORD +0 -96
  94. {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info}/entry_points.txt +0 -0
  95. {dbt_platform_helper-13.1.0.dist-info → dbt_platform_helper-15.16.0.dist-info/licenses}/LICENSE +0 -0
@@ -1,41 +1,76 @@
1
1
  from copy import deepcopy
2
+ from datetime import datetime
2
3
  from pathlib import Path
3
4
 
4
5
  from schema import SchemaError
5
6
 
7
+ from dbt_platform_helper.constants import FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION
6
8
  from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
9
+ from dbt_platform_helper.constants import PLATFORM_CONFIG_SCHEMA_VERSION
10
+ from dbt_platform_helper.constants import PLATFORM_HELPER_PACKAGE_NAME
11
+ from dbt_platform_helper.entities.platform_config_schema import PlatformConfigSchema
12
+ from dbt_platform_helper.entities.semantic_version import SemanticVersion
7
13
  from dbt_platform_helper.providers.config_validator import ConfigValidator
8
14
  from dbt_platform_helper.providers.config_validator import ConfigValidatorError
9
15
  from dbt_platform_helper.providers.io import ClickIOProvider
10
- from dbt_platform_helper.providers.platform_config_schema import PlatformConfigSchema
16
+ from dbt_platform_helper.providers.version import InstalledVersionProvider
11
17
  from dbt_platform_helper.providers.yaml_file import FileNotFoundException
12
18
  from dbt_platform_helper.providers.yaml_file import FileProviderException
13
19
  from dbt_platform_helper.providers.yaml_file import YamlFileProvider
14
20
 
21
+ SCHEMA_VERSION_MESSAGE = """Installed version: platform-helper: {installed_platform_helper_version} (schema version: {installed_schema_version})
22
+ 'platform-config.yml' version: platform-helper: {config_platform_helper_version} (schema version: {config_schema_version})"""
23
+ PLEASE_UPGRADE_TO_V13_MESSAGE = """Please ensure that you have already upgraded to platform-helper 13, following the instructions in https://platform.readme.trade.gov.uk/reference/upgrading-platform-helper/.
24
+
25
+ Then upgrade platform-helper to version {installed_platform_helper_version} and run 'platform-helper config migrate' to upgrade the configuration to the current schema version."""
26
+
27
+
28
class ConfigLoader:
    """Loads a YAML configuration file, turning provider failures into user-facing aborts."""

    def __init__(self, file_provider=YamlFileProvider, io: ClickIOProvider = ClickIOProvider()):
        self.file_provider = file_provider
        self.io = io

    def load(self, path):
        """Return the parsed contents of *path*, aborting via the IO provider on failure."""
        try:
            return self.file_provider.load(path)
        except FileNotFoundException as missing:
            self.io.abort_with_error(
                f"{missing} Please check it exists and you are in the root directory of your -deploy repository."
            )
        except FileProviderException as load_error:
            self.io.abort_with_error(f"Error loading configuration from {path}: {load_error}")
43
+
15
44
 
16
45
  class ConfigProvider:
17
46
  def __init__(
18
47
  self,
19
- config_validator: ConfigValidator = None,
20
- file_provider: YamlFileProvider = None,
21
- io: ClickIOProvider = None,
48
+ config_validator: ConfigValidator = ConfigValidator(),
49
+ file_provider: YamlFileProvider = YamlFileProvider,
50
+ io: ClickIOProvider = ClickIOProvider(),
51
+ schema_version_for_installed_platform_helper: int = PLATFORM_CONFIG_SCHEMA_VERSION,
52
+ installed_version_provider: InstalledVersionProvider = InstalledVersionProvider,
22
53
  ):
23
54
  self.config = {}
24
- self.validator = config_validator or ConfigValidator()
25
- self.io = io or ClickIOProvider()
26
- self.file_provider = file_provider or YamlFileProvider
27
-
28
- # TODO refactor so that apply_environment_defaults isn't set, discarded and set again
55
+ self.validator = config_validator
56
+ self.io = io
57
+ self.file_provider = file_provider
58
+ self.schema_version_for_installed_platform_helper = (
59
+ schema_version_for_installed_platform_helper
60
+ )
61
+ self.installed_version_provider = installed_version_provider
62
+
63
+ # TODO: DBTP-1964: refactor so that apply_environment_defaults isn't set, discarded and set again
29
64
  def get_enriched_config(self):
30
65
  return self.apply_environment_defaults(self.load_and_validate_platform_config())
31
66
 
32
67
  def _validate_platform_config(self):
33
68
  PlatformConfigSchema.schema().validate(self.config)
34
-
35
- # TODO= logically this isn't validation but loading + parsing, to move.
69
+ # TODO: DBTP-1964: = logically this isn't validation but loading + parsing, to move.
36
70
  # also, we apply defaults but discard that data. Should we just apply
37
71
  # defaults to config returned by load_and_validate
38
72
  enriched_config = ConfigProvider.apply_environment_defaults(self.config)
73
+
39
74
  try:
40
75
  self.validator.run_validations(enriched_config)
41
76
  except ConfigValidatorError as exc:
@@ -51,6 +86,8 @@ class ConfigProvider:
51
86
  except FileProviderException as e:
52
87
  self.io.abort_with_error(f"Error loading configuration from {path}: {e}")
53
88
 
89
+ self._validate_schema_version()
90
+
54
91
  try:
55
92
  self._validate_platform_config()
56
93
  except SchemaError as e:
@@ -58,13 +95,91 @@ class ConfigProvider:
58
95
 
59
96
  return self.config
60
97
 
98
+ def _abort_due_to_schema_version_error(self, config_description: str, action_required: str):
99
+ self.io.abort_with_error(
100
+ "\n".join(
101
+ [
102
+ config_description,
103
+ "",
104
+ action_required,
105
+ ]
106
+ )
107
+ )
108
+
109
+ def _validate_schema_version(self):
110
+ config_schema_version = self.config.get("schema_version")
111
+ config_platform_helper_version = self.config.get("default_versions", {}).get(
112
+ "platform-helper", ""
113
+ )
114
+ header = SCHEMA_VERSION_MESSAGE.format(
115
+ installed_platform_helper_version=self._installed_platform_helper_version(),
116
+ installed_schema_version=self.schema_version_for_installed_platform_helper,
117
+ config_platform_helper_version=(
118
+ config_platform_helper_version if config_platform_helper_version else "N/A"
119
+ ),
120
+ config_schema_version=(config_schema_version if config_schema_version else "N/A"),
121
+ )
122
+
123
+ if config_schema_version:
124
+ self._handle_schema_version_mismatch(config_schema_version, header)
125
+ else:
126
+ self._handle_missing_schema_version(config_platform_helper_version, header)
127
+
128
+ def _handle_schema_version_mismatch(self, platform_config_schema_version: int, header: str):
129
+ platform_config_schema_version_is_old = (
130
+ platform_config_schema_version < self.schema_version_for_installed_platform_helper
131
+ )
132
+ installed_platform_helper_is_old = (
133
+ platform_config_schema_version > self.schema_version_for_installed_platform_helper
134
+ )
135
+
136
+ if platform_config_schema_version_is_old:
137
+ self._abort_due_to_schema_version_error(
138
+ header,
139
+ "Please upgrade your platform-config.yml by running 'platform-helper config migrate'.",
140
+ )
141
+ elif installed_platform_helper_is_old:
142
+ self._abort_due_to_schema_version_error(
143
+ header,
144
+ f"Please update your platform-helper to a version that supports schema_version: {platform_config_schema_version}.",
145
+ )
146
+ # else the schema_version is the correct one so continue.
147
+
148
+ def _handle_missing_schema_version(self, config_platform_helper_version: str, header: str):
149
+ config_p_h_version_semver = SemanticVersion.from_string(config_platform_helper_version)
150
+ major_version = config_p_h_version_semver and config_p_h_version_semver.major
151
+ platform_config_is_old_but_supported_by_migrations = (
152
+ major_version and major_version == FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION
153
+ )
154
+ platform_config_is_old = (
155
+ major_version and major_version < FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION
156
+ )
157
+ platform_config_is_really_old = not major_version
158
+ installed_platform_helper_version = self._installed_platform_helper_version()
159
+
160
+ if platform_config_is_old_but_supported_by_migrations:
161
+ self._abort_due_to_schema_version_error(
162
+ header,
163
+ f"Please upgrade your platform-config.yml to be compatible with {installed_platform_helper_version} by running: 'platform-helper config migrate'.",
164
+ )
165
+ elif platform_config_is_old or platform_config_is_really_old:
166
+ self._abort_due_to_schema_version_error(
167
+ header,
168
+ PLEASE_UPGRADE_TO_V13_MESSAGE.format(
169
+ installed_platform_helper_version=installed_platform_helper_version,
170
+ ),
171
+ )
172
+ # if major_version and major_version > FIRST_UPGRADABLE_PLATFORM_HELPER_MAJOR_VERSION then
173
+ # the platform-config.yml is malformed and so should progress to validation if appropriate.
174
+
61
175
  def load_unvalidated_config_file(self, path=PLATFORM_CONFIG_FILE):
62
176
  try:
63
177
  return self.file_provider.load(path)
64
178
  except FileProviderException:
65
179
  return {}
66
180
 
67
- # TODO this general function should be moved out of ConfigProvider
181
+ # TODO: DBTP-1888: remove function and push logic to where this is called.
182
+ # removed usage from config domain, code is very generic and doesn't require the overhead of a function
68
183
  def config_file_check(self, path=PLATFORM_CONFIG_FILE):
69
184
  if not Path(path).exists():
70
185
  self.io.abort_with_error(
@@ -85,17 +200,10 @@ class ConfigProvider:
85
200
  name: data if data else {} for name, data in environments.items() if name != "*"
86
201
  }
87
202
 
88
- default_versions = config.get("default_versions", {})
89
-
90
203
  def combine_env_data(data):
91
204
  return {
92
205
  **env_defaults,
93
206
  **data,
94
- "versions": {
95
- **default_versions,
96
- **env_defaults.get("versions", {}),
97
- **data.get("versions", {}),
98
- },
99
207
  }
100
208
 
101
209
  defaulted_envs = {
@@ -106,3 +214,13 @@ class ConfigProvider:
106
214
  enriched_config["environments"] = defaulted_envs
107
215
 
108
216
  return enriched_config
217
+
218
+ def write_platform_config(self, new_platform_config):
219
+ current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
220
+ message = f"# Generated by platform-helper {self._installed_platform_helper_version()} / {current_date}.\n\n"
221
+ self.file_provider.write(PLATFORM_CONFIG_FILE, new_platform_config, message)
222
+
223
+ def _installed_platform_helper_version(self) -> str:
224
+ return str(
225
+ self.installed_version_provider.get_semantic_version(PLATFORM_HELPER_PACKAGE_NAME)
226
+ )
@@ -3,9 +3,11 @@ from typing import Callable
3
3
  import boto3
4
4
 
5
5
  from dbt_platform_helper.platform_exception import PlatformException
6
+ from dbt_platform_helper.providers.aws.opensearch import Opensearch
7
+ from dbt_platform_helper.providers.aws.redis import Redis
8
+ from dbt_platform_helper.providers.cache import Cache
9
+ from dbt_platform_helper.providers.cache import GetAWSVersionStrategy
6
10
  from dbt_platform_helper.providers.io import ClickIOProvider
7
- from dbt_platform_helper.providers.opensearch import OpensearchProvider
8
- from dbt_platform_helper.providers.redis import RedisProvider
9
11
 
10
12
 
11
13
  class ConfigValidatorError(PlatformException):
@@ -15,7 +17,10 @@ class ConfigValidatorError(PlatformException):
15
17
  class ConfigValidator:
16
18
 
17
19
  def __init__(
18
- self, validations: Callable[[dict], None] = None, io: ClickIOProvider = ClickIOProvider()
20
+ self,
21
+ validations: Callable[[dict], None] = None,
22
+ io: ClickIOProvider = ClickIOProvider(),
23
+ session: boto3.Session = None,
19
24
  ):
20
25
  self.validations = validations or [
21
26
  self.validate_supported_redis_versions,
@@ -23,16 +28,19 @@ class ConfigValidator:
23
28
  self.validate_environment_pipelines,
24
29
  self.validate_environment_pipelines_triggers,
25
30
  self.validate_database_copy_section,
26
- self.validate_database_migration_input_sources,
31
+ self.validate_s3_data_migration_config,
32
+ self.validate_cache_invalidation_config,
33
+ self.validate_config_for_managed_upgrades,
27
34
  ]
28
35
  self.io = io
36
+ self.session = session
29
37
 
30
38
  def run_validations(self, config: dict):
31
39
  for validation in self.validations:
32
40
  validation(config)
33
41
 
34
42
  def _validate_extension_supported_versions(
35
- self, config, extension_type, version_key, get_supported_versions
43
+ self, config, aws_provider, extension_type, version_key
36
44
  ):
37
45
  extensions = config.get("extensions", {})
38
46
  if not extensions:
@@ -44,7 +52,10 @@ class ConfigValidator:
44
52
  if extension.get("type") == extension_type
45
53
  ]
46
54
 
47
- supported_extension_versions = get_supported_versions()
55
+ # In this format so it can be monkey patched initially via mock_get_data fixture
56
+ cache_provider = Cache()
57
+ get_data_strategy = GetAWSVersionStrategy(aws_provider)
58
+ supported_extension_versions = cache_provider.get_data(get_data_strategy)
48
59
  extensions_with_invalid_version = []
49
60
 
50
61
  for extension in extensions_for_type:
@@ -71,24 +82,25 @@ class ConfigValidator:
71
82
  f"{extension_type} version for environment {version_failure['environment']} is not in the list of supported {extension_type} versions: {supported_extension_versions}. Provided Version: {version_failure['version']}",
72
83
  )
73
84
 
85
+ def _get_client(self, service_name: str):
86
+ if self.session:
87
+ return self.session.client(service_name)
88
+ return boto3.client(service_name)
89
+
74
90
  def validate_supported_redis_versions(self, config):
75
91
  return self._validate_extension_supported_versions(
76
92
  config=config,
77
- extension_type="redis",
78
- version_key="engine",
79
- get_supported_versions=RedisProvider(
80
- boto3.client("elasticache")
81
- ).get_supported_redis_versions,
93
+ aws_provider=Redis(self._get_client("elasticache")),
94
+ extension_type="redis", # TODO: DBTP-1888: this is information which can live in the RedisProvider
95
+ version_key="engine", # TODO: DBTP-1888: this is information which can live in the RedisProvider
82
96
  )
83
97
 
84
98
  def validate_supported_opensearch_versions(self, config):
85
99
  return self._validate_extension_supported_versions(
86
100
  config=config,
87
- extension_type="opensearch",
88
- version_key="engine",
89
- get_supported_versions=OpensearchProvider(
90
- boto3.client("opensearch")
91
- ).get_supported_opensearch_versions,
101
+ aws_provider=Opensearch(self._get_client("opensearch")),
102
+ extension_type="opensearch", # TODO: DBTP-1888: this is information which can live in the OpensearchProvider
103
+ version_key="engine", # TODO: DBTP-1888: this is information which can live in the OpensearchProvider
92
104
  )
93
105
 
94
106
  def validate_environment_pipelines(self, config):
@@ -170,21 +182,6 @@ class ConfigValidator:
170
182
  from_env = section["from"]
171
183
  to_env = section["to"]
172
184
 
173
- from_account = (
174
- config.get("environments", {})
175
- .get(from_env, {})
176
- .get("accounts", {})
177
- .get("deploy", {})
178
- .get("id")
179
- )
180
- to_account = (
181
- config.get("environments", {})
182
- .get(to_env, {})
183
- .get("accounts", {})
184
- .get("deploy", {})
185
- .get("id")
186
- )
187
-
188
185
  if from_env == to_env:
189
186
  errors.append(
190
187
  f"database_copy 'to' and 'from' cannot be the same environment in extension '{extension_name}'."
@@ -205,29 +202,10 @@ class ConfigValidator:
205
202
  f"database_copy 'to' parameter must be a valid environment ({all_envs_string}) but was '{to_env}' in extension '{extension_name}'."
206
203
  )
207
204
 
208
- if from_account != to_account:
209
- if "from_account" not in section:
210
- errors.append(
211
- f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'from_account' parameter must be present."
212
- )
213
- elif section["from_account"] != from_account:
214
- errors.append(
215
- f"Incorrect value for 'from_account' for environment '{from_env}'"
216
- )
217
-
218
- if "to_account" not in section:
219
- errors.append(
220
- f"Environments '{from_env}' and '{to_env}' are in different AWS accounts. The 'to_account' parameter must be present."
221
- )
222
- elif section["to_account"] != to_account:
223
- errors.append(
224
- f"Incorrect value for 'to_account' for environment '{to_env}'"
225
- )
226
-
227
205
  if errors:
228
206
  raise ConfigValidatorError("\n".join(errors))
229
207
 
230
- def validate_database_migration_input_sources(self, config: dict):
208
+ def validate_s3_data_migration_config(self, config: dict):
231
209
  extensions = config.get("extensions", {})
232
210
  if not extensions:
233
211
  return
@@ -246,6 +224,10 @@ class ConfigValidator:
246
224
  if "data_migration" not in env_config:
247
225
  continue
248
226
  data_migration = env_config.get("data_migration", {})
227
+ if extension.get("serve_static_content", {}):
228
+ errors.append(
229
+ "Data migration is not supported for static S3 buckets to avoid the risk of unintentionally exposing private data. However, you can copy data on an ad hoc basis using AWS CLI commands such as 'aws s3 sync' or 'aws s3 cp'."
230
+ )
249
231
  if "import" in data_migration and "import_sources" in data_migration:
250
232
  errors.append(
251
233
  f"Error in '{extension_name}.environments.{env}.data_migration': only the 'import_sources' property is required - 'import' is deprecated."
@@ -256,3 +238,82 @@ class ConfigValidator:
256
238
  )
257
239
  if errors:
258
240
  raise ConfigValidatorError("\n".join(errors))
241
+
242
+ def validate_cache_invalidation_config(self, config: dict):
243
+ codebase_pipelines = config.get("codebase_pipelines")
244
+ if not codebase_pipelines:
245
+ return
246
+
247
+ errors = []
248
+
249
+ all_environments = [env for env in config.get("environments", {}).keys() if not env == "*"]
250
+
251
+ for codebase in codebase_pipelines.values():
252
+ cache_invalidation_config = codebase.get("cache_invalidation")
253
+ if cache_invalidation_config:
254
+ for domain, config in cache_invalidation_config.get("domains").items():
255
+ environment = config.get("environment")
256
+ if environment not in all_environments:
257
+ errors.append(
258
+ f"Error in cache invalidation configuration for the domain '{domain}'. Environment '{environment}' is not defined for this application"
259
+ )
260
+
261
+ if errors:
262
+ raise ConfigValidatorError("\n".join(errors))
263
+
264
+ def validate_config_for_managed_upgrades(self, config: dict):
265
+ """
266
+ Validates that pipelines do not contain manual approvals when managed
267
+ upgrades are enabled.
268
+
269
+ Args:
270
+ config (dict): The platform configuration dictionary.
271
+
272
+ Raises:
273
+ ConfigValidatorError:
274
+ - If any pipeline contains manual approvals when platform-helper is "auto".
275
+ - If platform-config.yml is missing environment_pipelines or codebase_pipelines configuration.
276
+ """
277
+ errors = []
278
+
279
+ def find_pipeline_for_env(env_pipelines, env: str):
280
+ for name, config in env_pipelines.items():
281
+ if not isinstance(config, dict):
282
+ continue
283
+ envs = config.get("environments", {})
284
+ if isinstance(envs, dict) and env in envs:
285
+ return name
286
+
287
+ if config.get("default_versions", {}).get("platform-helper") == "auto":
288
+
289
+ pipelines = {}
290
+ environments = [env for env in config.get("environments").keys() if env != "*"]
291
+ environment_pipelines = config.get("environment_pipelines", {})
292
+ for env in environments:
293
+ pipeline = find_pipeline_for_env(environment_pipelines, env)
294
+ if not pipeline:
295
+ errors.append(
296
+ f"For auto default platform-helper version, all environments {environments} must be deployed in an environment pipeline. Missing: {env}"
297
+ )
298
+
299
+ for pipeline_section in ["environment_pipelines", "codebase_pipelines"]:
300
+ pipelines = config.get(pipeline_section, {})
301
+
302
+ if not pipelines:
303
+ errors.append(
304
+ f"For auto default platform-helper version, environment and codebase pipelines must be configured in platform-config.yml. {pipeline_section} is not configured."
305
+ )
306
+ continue
307
+
308
+ for pipeline_name, pipeline in pipelines.items():
309
+ if pipeline_section == "environment_pipelines":
310
+ pipeline_deploy_to_environments = pipeline.get("environments", {})
311
+ for env_name, env_config in pipeline_deploy_to_environments.items():
312
+ if isinstance(env_config, dict) and env_config.get("requires_approval"):
313
+ errors.append(
314
+ f"Managed upgrades enabled: (environment_pipelines) Pipeline '{pipeline_name}' environment '{env_name}' "
315
+ "cannot have manual approval when platform-helper is 'auto'."
316
+ )
317
+
318
+ if errors:
319
+ raise ConfigValidatorError("\n".join(errors))
@@ -1,10 +1,11 @@
1
1
  import json
2
+ import subprocess
2
3
  import time
3
4
 
4
5
  from botocore.exceptions import ClientError
5
6
 
6
7
  from dbt_platform_helper.constants import CONDUIT_DOCKER_IMAGE_LOCATION
7
- from dbt_platform_helper.providers.aws import CreateTaskTimeoutException
8
+ from dbt_platform_helper.providers.aws.exceptions import CreateTaskTimeoutException
8
9
  from dbt_platform_helper.providers.secrets import Secrets
9
10
  from dbt_platform_helper.utils.application import Application
10
11
  from dbt_platform_helper.utils.messages import abort_with_error
@@ -13,7 +14,6 @@ from dbt_platform_helper.utils.messages import abort_with_error
13
14
  def create_addon_client_task(
14
15
  iam_client,
15
16
  ssm_client,
16
- subprocess,
17
17
  application: Application,
18
18
  env: str,
19
19
  addon_type: str,
@@ -31,7 +31,6 @@ def create_addon_client_task(
31
31
  elif access == "admin":
32
32
  create_postgres_admin_task(
33
33
  ssm_client,
34
- subprocess,
35
34
  application,
36
35
  addon_name,
37
36
  addon_type,
@@ -53,7 +52,7 @@ def create_addon_client_task(
53
52
  # We cannot check for botocore.errorfactory.NoSuchEntityException as botocore generates that class on the fly as part of errorfactory.
54
53
  # factory. Checking the error code is the recommended way of handling these exceptions.
55
54
  if ex.response.get("Error", {}).get("Code", None) != "NoSuchEntity":
56
- # TODO When we are refactoring this, raise an exception to be caught at the command layer
55
+ # TODO: DBTP-1946: When we are refactoring this, raise an exception to be caught at the command layer
57
56
  abort_with_error(
58
57
  f"cannot obtain Role {role_name}: {ex.response.get('Error', {}).get('Message', '')}"
59
58
  )
@@ -71,15 +70,8 @@ def create_addon_client_task(
71
70
  )
72
71
 
73
72
 
74
- def create_postgres_admin_task(
75
- ssm_client,
76
- subprocess,
77
- app: Application,
78
- addon_name: str,
79
- addon_type: str,
80
- env: str,
81
- secret_name: str,
82
- task_name: str,
73
+ def get_postgres_admin_connection_string(
74
+ ssm_client, secret_name: str, app: Application, env: str, addon_name: str
83
75
  ):
84
76
  read_only_secret_name = secret_name + "_READ_ONLY_USER"
85
77
  master_secret_name = (
@@ -94,6 +86,23 @@ def create_postgres_admin_task(
94
86
  )
95
87
  )
96
88
 
89
+ return connection_string
90
+
91
+
92
+ def create_postgres_admin_task(
93
+ ssm_client,
94
+ app: Application,
95
+ addon_name: str,
96
+ addon_type: str,
97
+ env: str,
98
+ secret_name: str,
99
+ task_name: str,
100
+ ):
101
+
102
+ connection_string = get_postgres_admin_connection_string(
103
+ ssm_client, secret_name, app, env, addon_name
104
+ )
105
+
97
106
  subprocess.call(
98
107
  f"copilot task run --app {app.name} --env {env} "
99
108
  f"--task-group-name {task_name} "
@@ -121,7 +130,6 @@ def _temp_until_refactor_get_ecs_task_arns(ecs_client, cluster_arn: str, task_na
121
130
 
122
131
  def connect_to_addon_client_task(
123
132
  ecs_client,
124
- subprocess,
125
133
  application_name,
126
134
  env,
127
135
  cluster_arn,
@@ -132,7 +140,7 @@ def connect_to_addon_client_task(
132
140
  tries = 0
133
141
  while tries < 15 and not running:
134
142
  tries += 1
135
- # Todo: Use from ECS provider when we refactor this
143
+ # TODO: DBTP-1946: Use from ECS provider when we refactor this
136
144
  if get_ecs_task_arns(ecs_client, cluster_arn, task_name):
137
145
  subprocess.call(
138
146
  "copilot task exec "
@@ -154,7 +162,7 @@ def _normalise_secret_name(addon_name: str) -> str:
154
162
 
155
163
 
156
164
  def _get_secrets_provider(application: Application, env: str) -> Secrets:
157
- # Todo: We instantiate the secrets provider here to avoid rabbit holing, but something better probably possible when we are refactoring this area
165
+ # TODO: DBTP-1946: We instantiate the secrets provider here to avoid rabbit holing, but something better probably possible when we are refactoring this area
158
166
  return Secrets(
159
167
  application.environments[env].session.client("ssm"),
160
168
  application.environments[env].session.client("secretsmanager"),
@@ -1,20 +1,102 @@
1
+ from collections import defaultdict
2
+
3
+ import botocore
1
4
  from boto3 import Session
2
5
 
6
+ from dbt_platform_helper.providers.aws.exceptions import AWSException
7
+ from dbt_platform_helper.providers.aws.exceptions import ImageNotFoundException
8
+ from dbt_platform_helper.providers.aws.exceptions import MultipleImagesFoundException
9
+ from dbt_platform_helper.providers.aws.exceptions import RepositoryNotFoundException
10
+ from dbt_platform_helper.providers.io import ClickIOProvider
3
11
  from dbt_platform_helper.utils.aws import get_aws_session_or_abort
4
12
 
13
+ NOT_A_UNIQUE_TAG_INFO = 'INFO: The tag "{image_ref}" is not a unique, commit-specific tag. Deploying the corresponding commit tag "{commit_tag}" instead.'
14
+ NO_ASSOCIATED_COMMIT_TAG_WARNING = 'WARNING: The AWS ECR image "{image_ref}" has no associated commit tag so deploying "{image_ref}". Note this could result in images with unintended or incompatible changes being deployed in new ECS Tasks for your service.'
15
+
5
16
 
6
17
  class ECRProvider:
7
- def __init__(self, session: Session = None):
18
+ def __init__(self, session: Session = None, click_io: ClickIOProvider = ClickIOProvider()):
8
19
  self.session = session
9
- self.client = None
10
-
11
- def _get_client(self):
12
- if not self.session:
13
- self.session = get_aws_session_or_abort()
14
- return self.session.client("ecr")
20
+ self.click_io = click_io
15
21
 
16
22
  def get_ecr_repo_names(self) -> list[str]:
17
23
  out = []
18
24
  for page in self._get_client().get_paginator("describe_repositories").paginate():
19
25
  out.extend([repo["repositoryName"] for repo in page.get("repositories", {})])
20
26
  return out
27
+
28
+ def get_commit_tag_for_reference(self, application_name: str, codebase: str, image_ref: str):
29
+ repository = f"{application_name}/{codebase}"
30
+ next_page_token = None
31
+ tag_map = {}
32
+ digest_map = defaultdict(dict)
33
+
34
+ while True:
35
+ image_list = self._get_ecr_images(repository, image_ref, next_page_token)
36
+ next_page_token = image_list.get("nextToken")
37
+
38
+ for image in image_list["imageIds"]:
39
+ digest, tag = image["imageDigest"], image["imageTag"]
40
+ digest_map[digest][tag.split("-")[0]] = tag
41
+ tag_map[tag] = digest
42
+
43
+ if not next_page_token:
44
+ break
45
+
46
+ if image_ref.startswith("commit-"):
47
+ if image_ref in tag_map:
48
+ return image_ref
49
+ else:
50
+ candidates = [
51
+ tag
52
+ for tag in tag_map.keys()
53
+ if image_ref.startswith(tag) or tag.startswith(image_ref)
54
+ ]
55
+ if not candidates:
56
+ raise ImageNotFoundException(image_ref)
57
+ if len(candidates) > 1:
58
+ raise MultipleImagesFoundException(image_ref, candidates)
59
+ return candidates[0]
60
+ else:
61
+ digest = tag_map.get(image_ref)
62
+ if not digest:
63
+ raise ImageNotFoundException(image_ref)
64
+
65
+ commit_tag = digest_map.get(digest, dict()).get("commit")
66
+
67
+ if commit_tag:
68
+ self.click_io.info(
69
+ NOT_A_UNIQUE_TAG_INFO.format(image_ref=image_ref, commit_tag=commit_tag)
70
+ )
71
+ return commit_tag
72
+ else:
73
+ self.click_io.warn(NO_ASSOCIATED_COMMIT_TAG_WARNING.format(image_ref=image_ref))
74
+ return image_ref
75
+
76
+ def _get_ecr_images(self, repository, image_ref, next_page_token):
77
+ params = {"repositoryName": repository, "filter": {"tagStatus": "TAGGED"}}
78
+ if next_page_token:
79
+ params["nextToken"] = next_page_token
80
+ try:
81
+ image_list = self._get_client().list_images(**params)
82
+ return image_list
83
+ except botocore.exceptions.ClientError as e:
84
+ if e.response["Error"]["Code"] == "RepositoryNotFoundException":
85
+ raise RepositoryNotFoundException(repository)
86
+ else:
87
+ raise AWSException(
88
+ f"Unexpected error for repo '{repository}' and image reference '{image_ref}': {e}"
89
+ )
90
+
91
+ @staticmethod
92
+ def _check_image_details_exists(image_info: dict, image_ref: str):
93
+ """Error handling for any unexpected scenario where AWS ECR returns a
94
+ malformed response."""
95
+
96
+ if "imageDetails" not in image_info:
97
+ raise ImageNotFoundException(image_ref)
98
+
99
+ def _get_client(self):
100
+ if not self.session:
101
+ self.session = get_aws_session_or_abort()
102
+ return self.session.client("ecr")