dbt-platform-helper 12.3.0__py3-none-any.whl → 12.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-platform-helper might be problematic. Click here for more details.
- dbt_platform_helper/COMMANDS.md +6 -1
- dbt_platform_helper/commands/codebase.py +1 -1
- dbt_platform_helper/commands/conduit.py +2 -2
- dbt_platform_helper/commands/config.py +4 -4
- dbt_platform_helper/commands/copilot.py +13 -15
- dbt_platform_helper/commands/database.py +17 -4
- dbt_platform_helper/commands/environment.py +3 -2
- dbt_platform_helper/commands/pipeline.py +1 -29
- dbt_platform_helper/constants.py +3 -1
- dbt_platform_helper/domain/codebase.py +23 -5
- dbt_platform_helper/domain/conduit.py +0 -6
- dbt_platform_helper/domain/database_copy.py +14 -13
- dbt_platform_helper/domain/maintenance_page.py +9 -9
- dbt_platform_helper/platform_exception.py +5 -0
- dbt_platform_helper/providers/aws.py +32 -0
- dbt_platform_helper/providers/cache.py +83 -0
- dbt_platform_helper/providers/cloudformation.py +8 -1
- dbt_platform_helper/providers/copilot.py +2 -5
- dbt_platform_helper/providers/ecs.py +19 -4
- dbt_platform_helper/providers/load_balancers.py +11 -5
- dbt_platform_helper/providers/platform_config_schema.py +605 -0
- dbt_platform_helper/providers/secrets.py +51 -10
- dbt_platform_helper/providers/validation.py +19 -0
- dbt_platform_helper/utils/application.py +14 -2
- dbt_platform_helper/utils/arn_parser.py +1 -1
- dbt_platform_helper/utils/aws.py +22 -21
- dbt_platform_helper/utils/files.py +0 -70
- dbt_platform_helper/utils/git.py +2 -2
- dbt_platform_helper/utils/validation.py +3 -551
- dbt_platform_helper/utils/versioning.py +8 -8
- {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/METADATA +1 -1
- {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/RECORD +35 -35
- dbt_platform_helper/addons-template-map.yml +0 -29
- dbt_platform_helper/exceptions.py +0 -147
- dbt_platform_helper/templates/pipelines/environments/buildspec.yml +0 -80
- dbt_platform_helper/templates/pipelines/environments/manifest.yml +0 -48
- dbt_platform_helper/templates/pipelines/environments/overrides/cfn.patches.yml +0 -21
- {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/WHEEL +0 -0
- {dbt_platform_helper-12.3.0.dist-info → dbt_platform_helper-12.4.1.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,605 @@
|
|
|
1
|
+
import ipaddress
|
|
2
|
+
import re
|
|
3
|
+
|
|
4
|
+
from schema import Optional
|
|
5
|
+
from schema import Or
|
|
6
|
+
from schema import Regex
|
|
7
|
+
from schema import Schema
|
|
8
|
+
from schema import SchemaError
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _string_matching_regex(regex_pattern: str):
|
|
12
|
+
def validate(string):
|
|
13
|
+
if not re.match(regex_pattern, string):
|
|
14
|
+
# Todo: Raise suitable PlatformException?
|
|
15
|
+
raise SchemaError(
|
|
16
|
+
f"String '{string}' does not match the required pattern '{regex_pattern}'."
|
|
17
|
+
)
|
|
18
|
+
return string
|
|
19
|
+
|
|
20
|
+
return validate
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _is_integer_between(lower_limit, upper_limit):
|
|
24
|
+
def validate(value):
|
|
25
|
+
if isinstance(value, int) and lower_limit <= value <= upper_limit:
|
|
26
|
+
return True
|
|
27
|
+
# Todo: Raise suitable PlatformException?
|
|
28
|
+
raise SchemaError(f"should be an integer between {lower_limit} and {upper_limit}")
|
|
29
|
+
|
|
30
|
+
return validate
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
# Key pattern for user-named config blocks: a lowercase-led alphanumeric word
# (hyphen/underscore separators allowed) or the "*" wildcard.
_valid_schema_key = Regex(
    r"^([a-z][a-zA-Z0-9_-]*|\*)$",
    error="{} is invalid: must only contain lowercase alphanumeric characters separated by hyphen or underscore",
)

# Todo: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format
# Currently only enforces that "*" may appear at most once, and only as the final character.
_valid_branch_name = _string_matching_regex(r"^((?!\*).)*(\*)?$")

# CloudFormation-style deletion policies accepted by most extensions.
_valid_deletion_policy = Or("Delete", "Retain")

# Postgres additionally supports taking a final snapshot on deletion.
_valid_postgres_deletion_policy = Or("Delete", "Retain", "Snapshot")

# Environment names: lowercase-led alphanumeric, or "*" for the default environment.
_valid_environment_name = Regex(
    r"^([a-z][a-zA-Z0-9]*|\*)$",
    error="Environment name {} is invalid: names must only contain lowercase alphanumeric characters, or be the '*' default environment",
    # For values the "error" parameter works and outputs the custom text. For keys the custom text doesn't get reported in the exception for some reason.
)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _valid_kms_key_arn(key):
    """Regex matcher for a KMS key/alias ARN; *key* names the field in the error message."""
    arn_pattern = r"^arn:aws:kms:.*:\d{12}:(key|alias).*"
    return Regex(arn_pattern, error=f"{key} must contain a valid ARN for a KMS key")
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _valid_iam_role_arn(key):
    """Regex matcher for an IAM role ARN; *key* names the field in the error message."""
    arn_pattern = r"^arn:aws:iam::\d{12}:role/.*"
    return Regex(arn_pattern, error=f"{key} must contain a valid ARN for an IAM role")
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _valid_dbt_email_address(key):
    """Regex matcher for a DBT email address; *key* names the field in the error message."""
    email_pattern = r"^[\w.-]+@(businessandtrade.gov.uk|digital.trade.gov.uk)$"
    return Regex(email_pattern, error=f"{key} must contain a valid DBT email address")
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
# Grants a service in another application/environment read and/or write access;
# every grant requires cyber sign-off from a DBT email address.
_cross_environment_service_access_schema = {
    "application": str,
    "environment": _valid_environment_name,
    "account": str,
    "service": str,
    "read": bool,
    "write": bool,
    "cyber_sign_off_by": _valid_dbt_email_address("cyber_sign_off_by"),
}
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _no_configuration_required_schema(schema_type):
    """Schema for extension types that need no configuration beyond type and services."""
    shape = {"type": schema_type, Optional("services"): Or("__all__", [str])}
    return Schema(shape)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# Application load balancer....
# CDN cache-policy settings referenced by name from the ALB "paths" config.
_valid_alb_cache_policy = {
    "min_ttl": int,
    "max_ttl": int,
    "default_ttl": int,
    "cookies_config": Or("none", "whitelist", "allExcept", "all"),
    "header": Or("none", "whitelist"),
    "query_string_behavior": Or("none", "whitelist", "allExcept", "all"),
    Optional("cookie_list"): list,
    Optional("headers_list"): list,
    Optional("cache_policy_query_strings"): list,
}

# Maps URL paths to named cache/request policies; "default" applies when no
# "additional" entry matches a request path.
_valid_alb_paths_definition = {
    Optional("default"): {
        "cache": str,
        "request": str,
    },
    # NOTE(review): list[{...}] is a typing-style subscript, not a plain schema
    # list literal — confirm the schema library validates each element against
    # the inner dict rather than only checking the value is a list.
    Optional("additional"): list[
        {
            "path": str,
            "cache": str,
            "request": str,
        }
    ],
}
|
|
114
|
+
|
|
115
|
+
# Application load balancer extension. Each environment maps either to a
# settings dict or to None (meaning: no per-environment overrides).
_alb_schema = {
    "type": "alb",
    Optional("environments"): {
        _valid_environment_name: Or(
            {
                Optional("additional_address_list"): list,
                Optional("allowed_methods"): list,
                Optional("cached_methods"): list,
                Optional("cdn_compress"): bool,
                Optional("cdn_domains_list"): dict,
                Optional("cdn_geo_locations"): list,
                Optional("cdn_geo_restriction_type"): str,
                Optional("cdn_logging_bucket"): str,
                Optional("cdn_logging_bucket_prefix"): str,
                Optional("cdn_timeout_seconds"): int,
                Optional("default_waf"): str,
                Optional("domain_prefix"): str,
                Optional("enable_logging"): bool,
                Optional("env_root"): str,
                Optional("forwarded_values_forward"): str,
                Optional("forwarded_values_headers"): list,
                Optional("forwarded_values_query_string"): bool,
                Optional("origin_protocol_policy"): str,
                Optional("origin_ssl_protocols"): list,
                Optional("slack_alert_channel_alb_secret_rotation"): str,
                Optional("viewer_certificate_minimum_protocol_version"): str,
                Optional("viewer_certificate_ssl_support_method"): str,
                Optional("viewer_protocol_policy"): str,
                # dict({...}) is equivalent to the plain dict literal here;
                # keys are user-chosen policy names.
                Optional("cache_policy"): dict({str: _valid_alb_cache_policy}),
                Optional("origin_request_policy"): dict({str: {}}),
                Optional("paths"): dict({str: _valid_alb_paths_definition}),
            },
            None,
        )
    },
}
|
|
151
|
+
|
|
152
|
+
# Monitoring...
# Per-environment toggle for AWS OpsCenter integration.
_monitoring_schema = {
    "type": "monitoring",
    Optional("environments"): {
        _valid_environment_name: {
            Optional("enable_ops_center"): bool,
        }
    },
}
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
# Opensearch...
class ConditionalOpensSearchSchema(Schema):
    """Schema with cross-field validation for OpenSearch config.

    Beyond the structural checks, enforces that any environment specifying
    volume_size also specifies (or inherits) a plan, and that the volume size
    falls within the limits allowed for that plan.
    """

    def validate(self, data, _is_conditional_schema=True):
        # Run the plain structural validation first; the flag prevents the
        # super() call from re-entering these conditional checks recursively.
        data = super(ConditionalOpensSearchSchema, self).validate(
            data, _is_conditional_schema=False
        )
        if _is_conditional_schema:
            default_plan = None
            default_volume_size = None

            # The "*" environment (or legacy "default") supplies fallback
            # plan/volume_size values for every other environment.
            # NOTE(review): assumes "environments" is present in data even
            # though the schema marks it Optional — confirm with callers.
            default_environment_config = data["environments"].get(
                "*", data["environments"].get("default", None)
            )
            if default_environment_config:
                default_plan = default_environment_config.get("plan", None)
                default_volume_size = default_environment_config.get("volume_size", None)

            for env in data["environments"]:
                volume_size = data["environments"][env].get("volume_size", default_volume_size)
                plan = data["environments"][env].get("plan", default_plan)

                if volume_size:
                    # A volume size is meaningless without a plan to bound it.
                    if not plan:
                        # Todo: Raise suitable PlatformException?
                        raise SchemaError(f"Missing key: 'plan'")

                    if volume_size < _valid_opensearch_min_volume_size:
                        # Todo: Raise suitable PlatformException?
                        raise SchemaError(
                            f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {_valid_opensearch_min_volume_size}"
                        )

                    # The upper bound depends on the selected plan.
                    for key in _valid_opensearch_max_volume_size:
                        if (
                            plan == key
                            and not volume_size <= _valid_opensearch_max_volume_size[key]
                        ):
                            # Todo: Raise suitable PlatformException?
                            raise SchemaError(
                                f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {_valid_opensearch_min_volume_size} and {_valid_opensearch_max_volume_size[key]} for plan {plan}"
                            )

        return data
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
# Todo: Move to OpenSearch provider?
# Named OpenSearch sizing plans offered by the platform.
_valid_opensearch_plans = Or(
    "tiny", "small", "small-ha", "medium", "medium-ha", "large", "large-ha", "x-large", "x-large-ha"
)
# Todo: Move to OpenSearch provider?
# Minimum EBS volume size (GiB) for any plan.
_valid_opensearch_min_volume_size = 10
# Todo: Move to OpenSearch provider?
# Maximum EBS volume size (GiB) per plan; enforced by ConditionalOpensSearchSchema.
_valid_opensearch_max_volume_size = {
    "tiny": 100,
    "small": 200,
    "small-ha": 200,
    "medium": 512,
    "medium-ha": 512,
    "large": 1000,
    "large-ha": 1000,
    "x-large": 1500,
    "x-large-ha": 1500,
}
|
|
226
|
+
|
|
227
|
+
# OpenSearch extension: per-environment engine, sizing and log-retention settings.
_opensearch_schema = {
    "type": "opensearch",
    Optional("environments"): {
        _valid_environment_name: {
            Optional("engine"): str,
            Optional("deletion_policy"): _valid_deletion_policy,
            Optional("plan"): _valid_opensearch_plans,
            # Cross-checked against the plan's limits by ConditionalOpensSearchSchema.
            Optional("volume_size"): int,
            Optional("ebs_throughput"): int,
            Optional("ebs_volume_type"): str,
            Optional("instance"): str,
            Optional("instances"): int,
            Optional("master"): bool,
            Optional("es_app_log_retention_in_days"): int,
            Optional("index_slow_log_retention_in_days"): int,
            Optional("audit_log_retention_in_days"): int,
            Optional("search_slow_log_retention_in_days"): int,
            Optional("password_special_characters"): str,
            Optional("urlencode_password"): bool,
        }
    },
}
|
|
249
|
+
|
|
250
|
+
# Prometheus...
# Grants services permission to assume a per-environment Prometheus writer role.
_prometheus_policy_schema = {
    "type": "prometheus-policy",
    Optional("services"): Or("__all__", [str]),
    Optional("environments"): {
        _valid_environment_name: {
            "role_arn": str,
        }
    },
}
|
|
260
|
+
|
|
261
|
+
# Postgres...
# Todo: Move to Postgres provider?
# Named RDS sizing plans offered by the platform.
_valid_postgres_plans = Or(
    "tiny",
    "small",
    "small-ha",
    "small-high-io",
    "medium",
    "medium-ha",
    "medium-high-io",
    "large",
    "large-ha",
    "large-high-io",
    "x-large",
    "x-large-ha",
    "x-large-high-io",
)

# Todo: Move to Postgres provider?
# RDS storage types supported.
_valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2")

# One source->target database copy job, optionally across accounts and/or on a schedule.
_valid_postgres_database_copy = {
    "from": _valid_environment_name,
    "to": _valid_environment_name,
    Optional("from_account"): str,
    Optional("to_account"): str,
    Optional("pipeline"): {Optional("schedule"): str},
}
|
|
289
|
+
|
|
290
|
+
# Postgres extension: engine version, per-environment sizing/retention, and
# optional cross-environment database-copy jobs.
_postgres_schema = {
    "type": "postgres",
    # Accepts e.g. 14 or 14.4.
    "version": (Or(int, float)),
    Optional("deletion_policy"): _valid_postgres_deletion_policy,
    Optional("environments"): {
        _valid_environment_name: {
            Optional("plan"): _valid_postgres_plans,
            # GiB; RDS bounds.
            Optional("volume_size"): _is_integer_between(20, 10000),
            Optional("iops"): _is_integer_between(1000, 9950),
            Optional("snapshot_id"): str,
            # Per-environment override of the top-level deletion_policy.
            Optional("deletion_policy"): _valid_postgres_deletion_policy,
            Optional("deletion_protection"): bool,
            Optional("multi_az"): bool,
            Optional("storage_type"): _valid_postgres_storage_types,
            Optional("backup_retention_days"): _is_integer_between(1, 35),
        }
    },
    Optional("database_copy"): [_valid_postgres_database_copy],
    Optional("objects"): [
        {
            "key": str,
            Optional("body"): str,
        }
    ],
}
|
|
315
|
+
|
|
316
|
+
# Redis...
# Todo move to Redis provider?
# Named ElastiCache sizing plans offered by the platform.
_valid_redis_plans = Or(
    "micro",
    "micro-ha",
    "tiny",
    "tiny-ha",
    "small",
    "small-ha",
    "medium",
    "medium-ha",
    "large",
    "large-ha",
    "x-large",
    "x-large-ha",
)

# Redis extension: per-environment plan, engine version and HA settings.
_redis_schema = {
    "type": "redis",
    Optional("environments"): {
        _valid_environment_name: {
            Optional("plan"): _valid_redis_plans,
            Optional("engine"): str,
            Optional("replicas"): _is_integer_between(0, 5),
            Optional("deletion_policy"): _valid_deletion_policy,
            Optional("apply_immediately"): bool,
            Optional("automatic_failover_enabled"): bool,
            Optional("instance"): str,
            Optional("multi_az_enabled"): bool,
        }
    },
}
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
# S3 Bucket...
|
|
351
|
+
def _valid_s3_bucket_name(name: str):
|
|
352
|
+
errors = []
|
|
353
|
+
if not (2 < len(name) < 64):
|
|
354
|
+
errors.append("Length must be between 3 and 63 characters inclusive.")
|
|
355
|
+
|
|
356
|
+
if not re.match(r"^[a-z0-9].*[a-z0-9]$", name):
|
|
357
|
+
errors.append("Names must start and end with 0-9 or a-z.")
|
|
358
|
+
|
|
359
|
+
if not re.match(r"^[a-z0-9.-]*$", name):
|
|
360
|
+
errors.append("Names can only contain the characters 0-9, a-z, '.' and '-'.")
|
|
361
|
+
|
|
362
|
+
if ".." in name:
|
|
363
|
+
errors.append("Names cannot contain two adjacent periods.")
|
|
364
|
+
|
|
365
|
+
try:
|
|
366
|
+
ipaddress.ip_address(name)
|
|
367
|
+
errors.append("Names cannot be IP addresses.")
|
|
368
|
+
except ValueError:
|
|
369
|
+
pass
|
|
370
|
+
|
|
371
|
+
for prefix in ("xn--", "sthree-"):
|
|
372
|
+
if name.startswith(prefix):
|
|
373
|
+
errors.append(f"Names cannot be prefixed '{prefix}'.")
|
|
374
|
+
|
|
375
|
+
for suffix in ("-s3alias", "--ol-s3"):
|
|
376
|
+
if name.endswith(suffix):
|
|
377
|
+
errors.append(f"Names cannot be suffixed '{suffix}'.")
|
|
378
|
+
|
|
379
|
+
if errors:
|
|
380
|
+
# Todo: Raise suitable PlatformException?
|
|
381
|
+
raise SchemaError(
|
|
382
|
+
"Bucket name '{}' is invalid:\n{}".format(name, "\n".join(f" {e}" for e in errors))
|
|
383
|
+
)
|
|
384
|
+
|
|
385
|
+
return True
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
def _valid_s3_bucket_arn(key):
    """Regex matcher for an S3 bucket ARN; *key* names the field in the error message."""
    arn_pattern = r"^arn:aws:s3::.*"
    return Regex(arn_pattern, error=f"{key} must contain a valid ARN for an S3 bucket")
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
# One-off import of data from an external bucket, performed by an external worker role.
_valid_s3_data_migration = {
    "import": {
        Optional("source_kms_key_arn"): _valid_kms_key_arn("source_kms_key_arn"),
        "source_bucket_arn": _valid_s3_bucket_arn("source_bucket_arn"),
        "worker_role_arn": _valid_iam_role_arn("worker_role_arn"),
    },
}

# S3 Object Lock retention: None disables it; otherwise a mode plus exactly one
# of "days" or "years".
_valid_s3_bucket_retention_policy = Or(
    None,
    {
        "mode": Or("GOVERNANCE", "COMPLIANCE"),
        Or("days", "years", only_one=True): int,
    },
)

# Expire objects (optionally under a key prefix) after a number of days.
_valid_s3_bucket_lifecycle_rule = {
    Optional("filter_prefix"): str,
    "expiration_days": int,
    "enabled": bool,
}
|
|
416
|
+
|
|
417
|
+
# Grants an external IAM role read/write access; requires cyber sign-off from
# a DBT email address.
_valid_s3_bucket_external_role_access = {
    "role_arn": _valid_iam_role_arn("role_arn"),
    "read": bool,
    "write": bool,
    "cyber_sign_off_by": _valid_dbt_email_address("cyber_sign_off_by"),
}
|
|
423
|
+
|
|
424
|
+
# Name for an external_role_access block: lowercase-led alphanumeric with
# hyphen/underscore separators (no "*" wildcard, unlike _valid_schema_key).
_valid_s3_bucket_external_role_access_name = Regex(
    r"^([a-z][a-zA-Z0-9_-]*)$",
    # Fixed typo in the user-facing message: "hypen" -> "hyphen".
    error="External role access block name {} is invalid: names must only contain lowercase alphanumeric characters separated by hyphen or underscore",
)
|
|
428
|
+
|
|
429
|
+
# Settings common to the "s3" and "s3-policy" extension types.
_valid_s3_base_definition = dict(
    {
        Optional("readonly"): bool,
        Optional("serve_static_content"): bool,
        Optional("serve_static_param_name"): str,
        Optional("services"): Or("__all__", [str]),
        Optional("environments"): {
            _valid_environment_name: {
                "bucket_name": _valid_s3_bucket_name,
                Optional("deletion_policy"): _valid_deletion_policy,
                Optional("retention_policy"): _valid_s3_bucket_retention_policy,
                Optional("versioning"): bool,
                Optional("lifecycle_rules"): [_valid_s3_bucket_lifecycle_rule],
                Optional("data_migration"): _valid_s3_data_migration,
                Optional("external_role_access"): {
                    _valid_schema_key: _valid_s3_bucket_external_role_access
                },
                Optional("cross_environment_service_access"): {
                    _valid_schema_key: _cross_environment_service_access_schema
                },
            },
        },
    }
)

# "s3" buckets may also seed objects into the bucket at creation time.
# The "|" dict-merge operator requires Python 3.9+.
_s3_bucket_schema = _valid_s3_base_definition | {
    "type": "s3",
    Optional("objects"): [{"key": str, Optional("body"): str, Optional("content_type"): str}],
}

# "s3-policy" only attaches access policy to an existing bucket.
_s3_bucket_policy_schema = _valid_s3_base_definition | {"type": "s3-policy"}
|
|
460
|
+
|
|
461
|
+
# Application-wide default tool versions.
_default_versions_schema = {
    Optional("terraform-platform-modules"): str,
    Optional("platform-helper"): str,
}

# Per-environment version overrides (terraform modules only).
_valid_environment_specific_version_overrides = {
    Optional("terraform-platform-modules"): str,
}

# Per-pipeline version overrides (platform-helper only).
_valid_pipeline_specific_version_overrides = {
    Optional("platform-helper"): str,
}
|
|
473
|
+
|
|
474
|
+
# Top-level environments map: each environment name maps to None (all defaults)
# or to account/VPC/version settings.
_environments_schema = {
    str: Or(
        None,
        {
            Optional("accounts"): {
                "deploy": {
                    "name": str,
                    "id": str,
                },
                "dns": {
                    "name": str,
                    "id": str,
                },
            },
            # Todo: Is requires_approval relevant?
            Optional("requires_approval"): bool,
            Optional("versions"): _valid_environment_specific_version_overrides,
            Optional("vpc"): str,
        },
    )
}
|
|
495
|
+
|
|
496
|
+
# Codebase pipelines...
# Each codebase builds from a repository and deploys named services through
# pipelines triggered either by a branch or by image tags.
_codebase_pipelines_schema = [
    {
        "name": str,
        "repository": str,
        Optional("additional_ecr_repository"): str,
        Optional("deploy_repository_branch"): str,
        # NOTE(review): list[str] is a typing-style subscript, not a schema
        # list literal — confirm the schema library checks element types here.
        "services": list[str],
        "pipelines": [
            # A pipeline is either branch-triggered or tag-triggered.
            Or(
                {
                    "name": str,
                    "branch": _valid_branch_name,
                    "environments": [
                        {
                            "name": str,
                            Optional("requires_approval"): bool,
                        }
                    ],
                },
                {
                    "name": str,
                    "tag": bool,
                    "environments": [
                        {
                            "name": str,
                            Optional("requires_approval"): bool,
                        }
                    ],
                },
            ),
        ],
    },
]
|
|
530
|
+
|
|
531
|
+
# Environment pipelines...
# Pipelines that deploy environment infrastructure; keyed by pipeline name.
_environment_pipelines_schema = {
    str: {
        Optional("account"): str,
        # Defaults to "main" when omitted.
        Optional("branch", default="main"): _valid_branch_name,
        Optional("pipeline_to_trigger"): str,
        Optional("versions"): _valid_pipeline_specific_version_overrides,
        "slack_channel": str,
        "trigger_on_push": bool,
        # Per-environment settings mirror the top-level environments schema.
        "environments": {
            str: Or(
                None,
                {
                    Optional("accounts"): {
                        "deploy": {
                            "name": str,
                            "id": str,
                        },
                        "dns": {
                            "name": str,
                            "id": str,
                        },
                    },
                    Optional("requires_approval"): bool,
                    Optional("versions"): _valid_environment_specific_version_overrides,
                    Optional("vpc"): str,
                },
            )
        },
    }
}
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
# Used outside this file by validate_platform_config()
# Top-level schema for platform-config.yml.
PLATFORM_CONFIG_SCHEMA = Schema(
    {
        # The following line is for the AWS Copilot version, will be removed under DBTP-1002
        "application": str,
        Optional("legacy_project", default=False): bool,
        Optional("default_versions"): _default_versions_schema,
        # NOTE(review): list[str] is a typing-style subscript — confirm the
        # schema library checks element types here.
        Optional("accounts"): list[str],
        Optional("environments"): _environments_schema,
        Optional("codebase_pipelines"): _codebase_pipelines_schema,
        Optional("environment_pipelines"): _environment_pipelines_schema,
        # Each extension must match exactly one of the supported extension shapes.
        Optional("extensions"): {
            str: Or(
                _alb_schema,
                _monitoring_schema,
                _opensearch_schema,
                _postgres_schema,
                _prometheus_policy_schema,
                _redis_schema,
                _s3_bucket_schema,
                _s3_bucket_policy_schema,
            )
        },
    }
)
|
|
589
|
+
|
|
590
|
+
# This is used outside this file by validate_addons()
# Lookup from extension "type" to the Schema used to validate it standalone.
EXTENSION_SCHEMAS = {
    "alb": Schema(_alb_schema),
    "appconfig-ipfilter": _no_configuration_required_schema("appconfig-ipfilter"),
    # OpenSearch gets extra cross-field checks (plan vs volume_size).
    "opensearch": ConditionalOpensSearchSchema(_opensearch_schema),
    "postgres": Schema(_postgres_schema),
    "prometheus-policy": Schema(_prometheus_policy_schema),
    "redis": Schema(_redis_schema),
    "s3": Schema(_s3_bucket_schema),
    "s3-policy": Schema(_s3_bucket_policy_schema),
    "subscription-filter": _no_configuration_required_schema("subscription-filter"),
    # Todo: We think the next three are no longer relevant?
    "monitoring": Schema(_monitoring_schema),
    "vpc": _no_configuration_required_schema("vpc"),
    "xray": _no_configuration_required_schema("xray"),
}
|
|
@@ -2,11 +2,7 @@ import json
|
|
|
2
2
|
import urllib
|
|
3
3
|
|
|
4
4
|
from dbt_platform_helper.constants import CONDUIT_ADDON_TYPES
|
|
5
|
-
from dbt_platform_helper.
|
|
6
|
-
from dbt_platform_helper.exceptions import AddonTypeMissingFromConfigError
|
|
7
|
-
from dbt_platform_helper.exceptions import InvalidAddonTypeError
|
|
8
|
-
from dbt_platform_helper.exceptions import ParameterNotFoundError
|
|
9
|
-
from dbt_platform_helper.exceptions import SecretNotFoundError
|
|
5
|
+
from dbt_platform_helper.platform_exception import PlatformException
|
|
10
6
|
|
|
11
7
|
|
|
12
8
|
class Secrets:
|
|
@@ -43,8 +39,9 @@ class Secrets:
|
|
|
43
39
|
except self.secrets_manager_client.exceptions.ResourceNotFoundException:
|
|
44
40
|
pass
|
|
45
41
|
|
|
46
|
-
raise
|
|
42
|
+
raise SecretNotFoundException(secret_name)
|
|
47
43
|
|
|
44
|
+
# Todo: This probably does not belong in the secrets provider. When it moves, take the Todoed exceptions from below
|
|
48
45
|
def get_addon_type(self, addon_name: str) -> str:
|
|
49
46
|
addon_type = None
|
|
50
47
|
try:
|
|
@@ -54,19 +51,19 @@ class Secrets:
|
|
|
54
51
|
)["Parameter"]["Value"]
|
|
55
52
|
)
|
|
56
53
|
except self.ssm_client.exceptions.ParameterNotFound:
|
|
57
|
-
raise
|
|
54
|
+
raise ParameterNotFoundException(self.application_name, self.env)
|
|
58
55
|
|
|
59
56
|
if addon_name not in addon_config.keys():
|
|
60
|
-
raise
|
|
57
|
+
raise AddonNotFoundException(addon_name)
|
|
61
58
|
|
|
62
59
|
for name, config in addon_config.items():
|
|
63
60
|
if name == addon_name:
|
|
64
61
|
if not config.get("type"):
|
|
65
|
-
raise
|
|
62
|
+
raise AddonTypeMissingFromConfigException(addon_name)
|
|
66
63
|
addon_type = config["type"]
|
|
67
64
|
|
|
68
65
|
if not addon_type or addon_type not in CONDUIT_ADDON_TYPES:
|
|
69
|
-
raise
|
|
66
|
+
raise InvalidAddonTypeException(addon_type)
|
|
70
67
|
|
|
71
68
|
if "postgres" in addon_type:
|
|
72
69
|
addon_type = "postgres"
|
|
@@ -83,3 +80,47 @@ class Secrets:
|
|
|
83
80
|
|
|
84
81
|
def _normalise_secret_name(self, addon_name: str) -> str:
|
|
85
82
|
return addon_name.replace("-", "_").upper()
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
class AddonException(PlatformException):
    """Base class for errors raised while resolving addon configuration."""

    pass
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
class AddonNotFoundException(AddonException):
    """Raised when the named addon is absent from the addon configuration."""

    def __init__(self, addon_name: str):
        super().__init__(f"""Addon "{addon_name}" does not exist.""")
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
class AddonTypeMissingFromConfigException(AddonException):
    """Raised when an addon's config entry has no "type" key."""

    def __init__(self, addon_name: str):
        super().__init__(
            f"""The configuration for the addon {addon_name}, is misconfigured and missing the addon type."""
        )
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
# Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
class InvalidAddonTypeException(AddonException):
    """Raised when an addon's type is not one of the conduit-supported types."""

    def __init__(self, addon_type):
        # Kept on the instance so callers can inspect which type was rejected.
        self.addon_type = addon_type
        super().__init__(
            f"""Addon type "{self.addon_type}" is not supported, we support: {", ".join(CONDUIT_ADDON_TYPES)}."""
        )
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
class SecretException(PlatformException):
    """Base class for errors raised while fetching secrets/parameters."""

    pass
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class ParameterNotFoundException(SecretException):
    """Raised when the environment's addons SSM parameter does not exist."""

    def __init__(self, application_name: str, environment: str):
        super().__init__(
            f"""No parameter called "/copilot/applications/{application_name}/environments/{environment}/addons". Try deploying the "{application_name}" "{environment}" environment."""
        )
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class SecretNotFoundException(SecretException):
    """Raised when neither SSM nor Secrets Manager holds the named secret."""

    def __init__(self, secret_name: str):
        super().__init__(f"""No secret called "{secret_name}".""")