dbt-platform-helper 15.1.0__py3-none-any.whl → 15.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt_platform_helper/domain/codebase.py +12 -3
- dbt_platform_helper/domain/conduit.py +322 -76
- dbt_platform_helper/domain/copilot.py +4 -1
- dbt_platform_helper/domain/plans.py +41 -0
- dbt_platform_helper/entities/platform_config_schema.py +7 -48
- dbt_platform_helper/platform_exception.py +4 -0
- dbt_platform_helper/providers/aws/exceptions.py +20 -2
- dbt_platform_helper/providers/copilot.py +20 -12
- dbt_platform_helper/providers/ecr.py +62 -37
- dbt_platform_helper/providers/ecs.py +112 -40
- dbt_platform_helper/providers/io.py +6 -1
- dbt_platform_helper/providers/terraform_manifest.py +7 -1
- dbt_platform_helper/providers/vpc.py +1 -1
- dbt_platform_helper/utilities/decorators.py +103 -0
- {dbt_platform_helper-15.1.0.dist-info → dbt_platform_helper-15.2.1.dist-info}/METADATA +1 -1
- {dbt_platform_helper-15.1.0.dist-info → dbt_platform_helper-15.2.1.dist-info}/RECORD +22 -18
- {dbt_platform_helper-15.1.0.dist-info → dbt_platform_helper-15.2.1.dist-info}/WHEEL +1 -1
- terraform/elasticache-redis/plans.yml +85 -0
- terraform/opensearch/plans.yml +71 -0
- terraform/postgres/plans.yml +128 -0
- dbt_platform_helper/addon-plans.yml +0 -224
- {dbt_platform_helper-15.1.0.dist-info → dbt_platform_helper-15.2.1.dist-info}/LICENSE +0 -0
- {dbt_platform_helper-15.1.0.dist-info → dbt_platform_helper-15.2.1.dist-info}/entry_points.txt +0 -0
dbt_platform_helper/domain/codebase.py

@@ -167,13 +167,16 @@ class Codebase:

         image_ref = None
         if commit:
-
+            self._validate_sha_length(commit)
+            image_ref = f"commit-{commit}"
         elif tag:
             image_ref = f"tag-{tag}"
         elif branch:
             image_ref = f"branch-{branch}"
-
-            image_ref = self.ecr_provider.
+
+            image_ref = self.ecr_provider.get_commit_tag_for_reference(
+                application.name, codebase, image_ref
+            )

         codepipeline_client = session.client("codepipeline")
         pipeline_name = self.get_manual_release_pipeline(codepipeline_client, app, codebase)
@@ -284,6 +287,12 @@ class Codebase:
             return get_build_url_from_pipeline_execution_id(execution_id, build_options["name"])
         return None

+    def _validate_sha_length(self, commit):
+        if len(commit) < 7:
+            self.io.abort_with_error(
+                "Your commit reference is too short. Commit sha hashes specified by '--commit' must be at least 7 characters long."
+            )
+

 class ApplicationDeploymentNotTriggered(PlatformException):
     def __init__(self, codebase: str):
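The guard added above rejects abbreviated `--commit` values before any image lookup is attempted. A minimal standalone sketch of the same check (the free function and the ValueError are illustrative; the real method reports the error through `self.io.abort_with_error`):

    # Illustrative sketch of the new short-sha guard; not part of the package.
    def validate_sha_length(commit: str) -> None:
        if len(commit) < 7:
            raise ValueError(
                "Your commit reference is too short. Commit sha hashes specified by "
                "'--commit' must be at least 7 characters long."
            )

    validate_sha_length("a1b2c3d")    # 7 characters: accepted
    # validate_sha_length("a1b2")     # fewer than 7: rejected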
dbt_platform_helper/domain/conduit.py

@@ -1,119 +1,365 @@
-import
+from abc import ABC
+from abc import abstractmethod
+from typing import Callable
+from typing import Optional

 from dbt_platform_helper.providers.cloudformation import CloudFormation
+from dbt_platform_helper.providers.copilot import _normalise_secret_name
 from dbt_platform_helper.providers.copilot import connect_to_addon_client_task
 from dbt_platform_helper.providers.copilot import create_addon_client_task
+from dbt_platform_helper.providers.copilot import get_postgres_admin_connection_string
 from dbt_platform_helper.providers.ecs import ECS
 from dbt_platform_helper.providers.io import ClickIOProvider
 from dbt_platform_helper.providers.secrets import Secrets
+from dbt_platform_helper.providers.vpc import VpcProvider
 from dbt_platform_helper.utils.application import Application


-class
+class ConduitECSStrategy(ABC):
+    @abstractmethod
+    def get_data(self):
+        pass
+
+    @abstractmethod
+    def start_task(self, data_context: dict):
+        pass
+
+    @abstractmethod
+    def exec_task(self, data_context: dict):
+        pass
+
+
+class TerraformConduitStrategy(ConduitECSStrategy):
     def __init__(
         self,
+        clients,
+        ecs_provider: ECS,
         application: Application,
+        addon_name: str,
+        addon_type: str,
+        access: str,
+        env: str,
+        io: ClickIOProvider,
+        vpc_provider: Callable,
+        get_postgres_admin_connection_string: Callable,
+    ):
+        self.clients = clients
+        self.ecs_provider = ecs_provider
+        self.io = io
+        self.vpc_provider = vpc_provider
+        self.access = access
+        self.addon_name = addon_name
+        self.addon_type = addon_type
+        self.application = application
+        self.env = env
+        self.get_postgres_admin_connection_string = get_postgres_admin_connection_string
+
+    def get_data(self):
+        self.io.info("Starting conduit in Terraform mode.")
+        return {
+            "cluster_arn": self.ecs_provider.get_cluster_arn_by_name(
+                f"{self.application.name}-{self.env}"
+            ),
+            "task_def_family": self._generate_container_name(),
+            "vpc_name": self._resolve_vpc_name(),
+            "addon_type": self.addon_type,
+            "access": self.access,
+        }
+
+    def start_task(self, data_context: dict):
+
+        environments = self.application.environments
+        environment = environments.get(self.env)
+        env_session = environment.session
+
+        vpc_provider = self.vpc_provider(env_session)
+        vpc_config = vpc_provider.get_vpc(
+            self.application.name,
+            self.env,
+            data_context["vpc_name"],
+        )
+
+        postgres_admin_env_vars = None
+        if data_context["addon_type"] == "postgres" and data_context["access"] == "admin":
+            postgres_admin_env_vars = [
+                {
+                    "name": "CONNECTION_SECRET",
+                    "value": self.get_postgres_admin_connection_string(
+                        self.clients.get("ssm"),
+                        f"/copilot/{self.application.name}/{self.env}/secrets/{_normalise_secret_name(self.addon_name)}",
+                        self.application,
+                        self.env,
+                        self.addon_name,
+                    ),
+                },
+            ]
+
+        self.ecs_provider.start_ecs_task(
+            f"{self.application.name}-{self.env}",
+            self._generate_container_name(),
+            data_context["task_def_family"],
+            vpc_config,
+            postgres_admin_env_vars,
+        )
+
+    def exec_task(self, data_context: dict):
+        self.ecs_provider.exec_task(data_context["cluster_arn"], data_context["task_arns"][0])
+
+    def _generate_container_name(self):
+        return f"conduit-{self.addon_type}-{self.access}-{self.application.name}-{self.env}-{self.addon_name}"
+
+    def _resolve_vpc_name(self):
+        ssm_client = self.clients["ssm"]
+        parameter_key = f"/conduit/{self.application.name}/{self.env}/{_normalise_secret_name(self.addon_name)}_VPC_NAME"
+
+        try:
+            response = ssm_client.get_parameter(Name=parameter_key)
+            return response["Parameter"]["Value"]
+        except ssm_client.exceptions.ParameterNotFound:
+            self.io.abort_with_error(
+                f"Could not find VPC name for {self.addon_name}. Missing SSM param: {parameter_key}"
+            )
+
+
+class CopilotConduitStrategy(ConduitECSStrategy):
+    def __init__(
+        self,
+        clients,
+        ecs_provider: ECS,
         secrets_provider: Secrets,
         cloudformation_provider: CloudFormation,
-
-
-
-
-
+        application: Application,
+        addon_name: str,
+        access: str,
+        env: str,
+        io: ClickIOProvider,
+        connect_to_addon_client_task: Callable,
+        create_addon_client_task: Callable,
     ):
-
-        self.application = application
-        self.secrets_provider = secrets_provider
+        self.clients = clients
         self.cloudformation_provider = cloudformation_provider
         self.ecs_provider = ecs_provider
-        self.
+        self.secrets_provider = secrets_provider
+
         self.io = io
+        self.access = access
+        self.addon_name = addon_name
+        self.application = application
+        self.env = env
         self.connect_to_addon_client_task = connect_to_addon_client_task
         self.create_addon_client_task = create_addon_client_task

-    def
-
-        addon_type
-
+    def get_data(self):
+
+        addon_type = self.secrets_provider.get_addon_type(self.addon_name)
+        parameter_name = self.secrets_provider.get_parameter_name(
+            addon_type, self.addon_name, self.access
         )
+        task_name = self.ecs_provider.get_or_create_task_name(self.addon_name, parameter_name)

-
-
-
-
-
-
-
-
-
-
-
+        return {
+            "cluster_arn": self.ecs_provider.get_cluster_arn_by_copilot_tag(),
+            "addon_type": addon_type,
+            "task_def_family": f"copilot-{task_name}",
+            "parameter_name": parameter_name,
+            "task_name": task_name,
+        }
+
+    def start_task(self, data_context: dict):
+        self.create_addon_client_task(
+            self.clients["iam"],
+            self.clients["ssm"],
+            self.application,
+            self.env,
+            data_context["addon_type"],
+            self.addon_name,
+            data_context["task_name"],
+            self.access,
+        )
+
+        self.io.info("Updating conduit task")
+        self.cloudformation_provider.add_stack_delete_policy_to_task_role(data_context["task_name"])
+        stack_name = self.cloudformation_provider.update_conduit_stack_resources(
+            self.application.name,
+            self.env,
+            data_context["addon_type"],
+            self.addon_name,
+            data_context["task_name"],
+            data_context["parameter_name"],
+            self.access,
+        )
+        self.io.info("Waiting for conduit task update to complete...")
+        self.cloudformation_provider.wait_for_cloudformation_to_reach_status(
+            "stack_update_complete", stack_name
+        )
+
+    def exec_task(self, data_context: dict):
+        self.connect_to_addon_client_task(
+            self.clients["ecs"],
+            self.application.name,
+            self.env,
+            data_context["cluster_arn"],
+            data_context["task_name"],
+        )
+
+
+class ConduitStrategyFactory:
+
+    @staticmethod
+    def detect_mode(
+        ecs_client,
+        application,
+        environment,
+        addon_name: str,
+        addon_type: str,
+        access: str,
+        io: ClickIOProvider,
+    ) -> str:
+        """Detect if Terraform-based conduit task definitions are present,
+        otherwise default to Copilot mode."""
+        paginator = ecs_client.get_paginator("list_task_definitions")
+        prefix = f"conduit-{addon_type}-{access}-{application}-{environment}-{addon_name}"
+
+        for page in paginator.paginate():
+            for arn in page["taskDefinitionArns"]:
+                if arn.split("/")[-1].startswith(prefix):
+                    return "terraform"
+
+        io.info("Defaulting to copilot mode.")
+        return "copilot"
+
+    @staticmethod
+    def create_strategy(
+        mode: str,
+        clients,
+        ecs_provider: ECS,
+        secrets_provider: Secrets,
+        cloudformation_provider: CloudFormation,
+        application: Application,
+        addon_name: str,
+        addon_type: str,
+        access: str,
+        env: str,
+        io: ClickIOProvider,
+    ):
+
+        if mode == "terraform":
+            return TerraformConduitStrategy(
+                clients,
+                ecs_provider,
+                application,
                addon_name,
-
+                addon_type,
                access,
-        )
-
-        self.io.info("Updating conduit task")
-        self._update_stack_resources(
-            self.application.name,
                env,
-
+                io,
+                vpc_provider=VpcProvider,
+                get_postgres_admin_connection_string=get_postgres_admin_connection_string,
+            )
+        else:
+            return CopilotConduitStrategy(
+                clients,
+                ecs_provider,
+                secrets_provider,
+                cloudformation_provider,
+                application,
                addon_name,
-            task_name,
-            parameter_name,
                access,
+                env,
+                io,
+                connect_to_addon_client_task=connect_to_addon_client_task,
+                create_addon_client_task=create_addon_client_task,
            )

-        task_arns = self.ecs_provider.get_ecs_task_arns(cluster_arn, task_name)

-
-
+class Conduit:
+    def __init__(
+        self,
+        application: Application,
+        secrets_provider: Secrets,
+        cloudformation_provider: CloudFormation,
+        ecs_provider: ECS,
+        io: ClickIOProvider = ClickIOProvider(),
+        vpc_provider=VpcProvider,
+        strategy_factory: Optional[ConduitStrategyFactory] = None,
+    ):
+
+        self.application = application
+        self.secrets_provider = secrets_provider
+        self.cloudformation_provider = cloudformation_provider
+        self.ecs_provider = ecs_provider
+        self.io = io
+        self.vpc_provider = vpc_provider
+        self.strategy_factory = strategy_factory or ConduitStrategyFactory()

-
+    def start(self, env: str, addon_name: str, access: str = "read"):
+        self.clients = self._initialise_clients(env)
+        addon_type = self.secrets_provider.get_addon_type(addon_name)

-
+        if (addon_type == "opensearch" or addon_type == "redis") and (access != "read"):
+            access = "read"

-        self.
-
-
+        mode = self.strategy_factory.detect_mode(
+            self.clients.get("ecs"),
+            self.application.name,
+            env,
+            addon_name,
+            addon_type,
+            access,
+            self.io,
+        )
+
+        strategy = self.strategy_factory.create_strategy(
+            mode=mode,
+            clients=self.clients,
+            ecs_provider=self.ecs_provider,
+            secrets_provider=self.secrets_provider,
+            cloudformation_provider=self.cloudformation_provider,
+            application=self.application,
+            addon_name=addon_name,
+            addon_type=addon_type,
+            access=access,
+            env=env,
+            io=self.io,
+        )
+
+        data_context = strategy.get_data()
+
+        data_context["task_arns"] = self.ecs_provider.get_ecs_task_arns(
+            data_context["cluster_arn"], data_context["task_def_family"]
+        )
+
+        info_log = (
+            f"Checking if a conduit ECS task is already running for:\n"
+            f"  Addon Name : {addon_name}\n"
+            f"  Addon Type : {addon_type}"
        )

+        if addon_type == "postgres":
+            info_log += f"\n  Access Level : {access}"
+
+        self.io.info(info_log)
+
+        if not data_context["task_arns"]:
+            self.io.info("Creating conduit ECS task...")
+            strategy.start_task(data_context)
+            data_context["task_arns"] = self.ecs_provider.wait_for_task_to_register(
+                data_context["cluster_arn"], data_context["task_def_family"]
+            )
+        else:
+            self.io.info(f"Found a task already running: {data_context['task_arns'][0]}")
+
+        self.io.info(f"Waiting for ECS Exec agent to become available on the conduit task...")
+        self.ecs_provider.ecs_exec_is_available(
+            data_context["cluster_arn"], data_context["task_arns"]
+        )
+
+        self.io.info("Connecting to conduit task...")
+        strategy.exec_task(data_context)
+
     def _initialise_clients(self, env):
         return {
             "ecs": self.application.environments[env].session.client("ecs"),
             "iam": self.application.environments[env].session.client("iam"),
             "ssm": self.application.environments[env].session.client("ssm"),
         }
-
-    def _get_addon_details(self, addon_name, access):
-        addon_type = self.secrets_provider.get_addon_type(addon_name)
-        cluster_arn = self.ecs_provider.get_cluster_arn()
-        parameter_name = self.secrets_provider.get_parameter_name(addon_type, addon_name, access)
-        task_name = self.ecs_provider.get_or_create_task_name(addon_name, parameter_name)
-
-        return addon_type, cluster_arn, parameter_name, task_name
-
-    def _update_stack_resources(
-        self,
-        app_name,
-        env,
-        addon_type,
-        addon_name,
-        task_name,
-        parameter_name,
-        access,
-    ):
-        self.cloudformation_provider.add_stack_delete_policy_to_task_role(task_name)
-        stack_name = self.cloudformation_provider.update_conduit_stack_resources(
-            app_name,
-            env,
-            addon_type,
-            addon_name,
-            task_name,
-            parameter_name,
-            access,
-        )
-        self.io.info("Waiting for conduit task update to complete...")
-        self.cloudformation_provider.wait_for_cloudformation_to_reach_status(
-            "stack_update_complete", stack_name
-        )
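conduit.py is reorganised around a strategy pattern: `ConduitStrategyFactory.detect_mode` checks ECS for a Terraform-managed task definition family named `conduit-<addon_type>-<access>-<application>-<environment>-<addon_name>` and falls back to Copilot mode, and `Conduit.start` then drives whichever strategy it gets through the same `get_data` / `start_task` / `exec_task` sequence. A minimal standalone sketch of the detection step, assuming a boto3 ECS client and the naming scheme shown in the diff:

    # Illustrative sketch of the Terraform-vs-Copilot detection; mirrors detect_mode above.
    import boto3

    def detect_conduit_mode(ecs_client, application, environment, addon_name, addon_type, access):
        prefix = f"conduit-{addon_type}-{access}-{application}-{environment}-{addon_name}"
        paginator = ecs_client.get_paginator("list_task_definitions")
        for page in paginator.paginate():
            for arn in page["taskDefinitionArns"]:
                # Task definition ARNs end in ".../task-definition/<family>:<revision>"
                if arn.split("/")[-1].startswith(prefix):
                    return "terraform"
        return "copilot"

    # Hypothetical usage (names are examples; requires AWS credentials):
    # mode = detect_conduit_mode(boto3.client("ecs"), "my-app", "dev", "my-db", "postgres", "read")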
dbt_platform_helper/domain/copilot.py

@@ -10,6 +10,7 @@ import botocore.errorfactory

 from dbt_platform_helper.constants import PLATFORM_CONFIG_FILE
 from dbt_platform_helper.domain.copilot_environment import CopilotTemplating
+from dbt_platform_helper.domain.plans import PlanLoader
 from dbt_platform_helper.providers.config import ConfigProvider
 from dbt_platform_helper.providers.files import FileProvider
 from dbt_platform_helper.providers.io import ClickIOProvider
@@ -45,6 +46,7 @@ class Copilot:
         kms_provider: KMSProvider,
         session,
         io: ClickIOProvider = ClickIOProvider(),
+        plan_manager: PlanLoader = PlanLoader(),
         yaml_file_provider: YamlFileProvider = YamlFileProvider,
     ):
         self.config_provider = config_provider
@@ -53,6 +55,7 @@ class Copilot:
         self.copilot_templating = copilot_templating
         self.kms_provider = kms_provider
         self.io = io
+        self.plan_manager = plan_manager
         self.yaml_file_provider = yaml_file_provider
         self.session = session

@@ -169,7 +172,7 @@ class Copilot:
         def _normalise_keys(source: dict):
             return {k.replace("-", "_"): v for k, v in source.items()}

-        addon_plans = self.
+        addon_plans = self.plan_manager.load()

         # load and validate config
         config = self.yaml_file_provider.load(config_file)
dbt_platform_helper/domain/plans.py (new file)

@@ -0,0 +1,41 @@
+from pathlib import Path
+
+from dbt_platform_helper.providers.yaml_file import YamlFileProvider
+
+
+class PlanLoader:
+
+    PROJECT_DIR = Path(__file__).resolve().parent.parent.parent
+
+    def __init__(
+        self,
+        extensions: dict = None,
+        terraform_dir: str = "terraform",
+        loader: YamlFileProvider = YamlFileProvider,
+    ):
+        self.path = terraform_dir
+        self.loader = loader
+        self._cache = {}
+        self.extensions = extensions or {
+            "redis": "elasticache-redis",
+            "opensearch": "opensearch",
+            "postgres": "postgres",
+        }
+
+    def load(self):
+        result = {}
+        for key, value in self.extensions.items():
+            result[key] = self._load_plan(key, f"{self.PROJECT_DIR}/{self.path}/{value}/plans.yml")
+        return result
+
+    def _load_plan(self, name, path):
+        if name in self._cache:
+            return self._cache[name]
+        else:
+            plan = self.loader.load(path)
+            self._cache[name] = plan
+            return plan
+
+    def get_plan_names(self, extension):
+        plans = self.load()
+        return list(plans[extension].keys())
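The new PlanLoader reads each extension's plans.yml from the packaged Terraform modules (terraform/elasticache-redis, terraform/opensearch, terraform/postgres) and caches the parsed YAML, replacing the bundled dbt_platform_helper/addon-plans.yml. A brief usage sketch, assuming the wheel is installed (the printed plan names are examples, not an exhaustive list):

    # Illustrative usage of the new PlanLoader.
    from dbt_platform_helper.domain.plans import PlanLoader

    loader = PlanLoader()
    plans = loader.load()                        # {"redis": {...}, "opensearch": {...}, "postgres": {...}}
    print(loader.get_plan_names("postgres"))     # e.g. ["tiny", "small", "small-ha", ...]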
dbt_platform_helper/entities/platform_config_schema.py

@@ -9,6 +9,10 @@ from schema import Schema
 from schema import SchemaError

 from dbt_platform_helper.constants import PLATFORM_CONFIG_SCHEMA_VERSION
+from dbt_platform_helper.domain.plans import PlanLoader
+
+plan_manager = PlanLoader()
+plan_manager.load()


 class PlatformConfigSchema:
@@ -248,17 +252,7 @@ class PlatformConfigSchema:
     @staticmethod
     def __opensearch_schema() -> dict:
         # TODO: DBTP-1943: Move to OpenSearch provider?
-        _valid_opensearch_plans = Or(
-            "tiny",
-            "small",
-            "small-ha",
-            "medium",
-            "medium-ha",
-            "large",
-            "large-ha",
-            "x-large",
-            "x-large-ha",
-        )
+        _valid_opensearch_plans = Or(*plan_manager.get_plan_names("opensearch"))

         return {
             "type": "opensearch",
@@ -285,28 +279,7 @@ class PlatformConfigSchema:

     @staticmethod
     def __postgres_schema() -> dict:
-
-        _valid_postgres_plans = Or(
-            "tiny",
-            "small",
-            "small-ha",
-            "small-high-io",
-            "medium",
-            "medium-ha",
-            "medium-high-io",
-            "large",
-            "large-ha",
-            "large-high-io",
-            "x-large",
-            "x-large-ha",
-            "x-large-high-io",
-            "2x-large",
-            "2x-large-ha",
-            "2x-large-high-io",
-            "4x-large",
-            "4x-large-ha",
-            "4x-large-high-io",
-        )
+        _valid_postgres_plans = Or(*plan_manager.get_plan_names("postgres"))

         # TODO: DBTP-1943: Move to Postgres provider?
         _valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2")
@@ -361,21 +334,7 @@ class PlatformConfigSchema:

     @staticmethod
     def __redis_schema() -> dict:
-
-        _valid_redis_plans = Or(
-            "micro",
-            "micro-ha",
-            "tiny",
-            "tiny-ha",
-            "small",
-            "small-ha",
-            "medium",
-            "medium-ha",
-            "large",
-            "large-ha",
-            "x-large",
-            "x-large-ha",
-        )
+        _valid_redis_plans = Or(*plan_manager.get_plan_names("redis"))

         return {
             "type": "redis",
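With the hard-coded plan lists gone, the schema's valid plan names come from the same plans.yml files the Terraform modules ship, so adding a plan no longer requires a schema change. A minimal sketch of how the generated validator behaves, using the schema library the file already imports (the plan names below are placeholders for the get_plan_names output):

    # Illustrative sketch of plan validation built from dynamic plan names.
    from schema import Or, Schema, SchemaError

    valid_plans = Or("tiny", "small", "small-ha")      # stand-in for Or(*plan_manager.get_plan_names("postgres"))
    extension_schema = Schema({"plan": valid_plans})

    extension_schema.validate({"plan": "small"})       # accepted
    try:
        extension_schema.validate({"plan": "huge"})    # not a known plan
    except SchemaError as error:
        print(error)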
dbt_platform_helper/providers/aws/exceptions.py

@@ -12,16 +12,34 @@ class CreateTaskTimeoutException(AWSException):
         )


+IMAGE_NOT_FOUND_TEMPLATE = """An image labelled "{image_ref}" could not be found in your image repository. Try the `platform-helper codebase build` command first."""
+
+
 class ImageNotFoundException(AWSException):
     def __init__(self, image_ref: str):
+        super().__init__(IMAGE_NOT_FOUND_TEMPLATE.format(image_ref=image_ref))
+
+
+MULTIPLE_IMAGES_FOUND_TEMPLATE = (
+    'Image reference "{image_ref}" is matched by the following images: {matching_images}'
+)
+
+
+class MultipleImagesFoundException(AWSException):
+    def __init__(self, image_ref: str, matching_images: list[str]):
         super().__init__(
-
+            MULTIPLE_IMAGES_FOUND_TEMPLATE.format(
+                image_ref=image_ref, matching_images=", ".join(sorted(matching_images))
+            )
         )


+REPOSITORY_NOT_FOUND_TEMPLATE = """The ECR repository "{repository}" could not be found."""
+
+
 class RepositoryNotFoundException(AWSException):
     def __init__(self, repository: str):
-        super().__init__(
+        super().__init__(REPOSITORY_NOT_FOUND_TEMPLATE.format(repository=repository))


 class LogGroupNotFoundException(AWSException):