dbt-platform-helper 15.10.0__py3-none-any.whl → 15.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-platform-helper might be problematic.
- dbt_platform_helper/COMMANDS.md +0 -91
- dbt_platform_helper/commands/internal.py +114 -0
- dbt_platform_helper/constants.py +17 -0
- dbt_platform_helper/domain/conduit.py +13 -5
- dbt_platform_helper/domain/config.py +30 -1
- dbt_platform_helper/domain/maintenance_page.py +10 -8
- dbt_platform_helper/domain/service.py +274 -66
- dbt_platform_helper/domain/update_alb_rules.py +346 -0
- dbt_platform_helper/entities/platform_config_schema.py +0 -3
- dbt_platform_helper/entities/service.py +139 -13
- dbt_platform_helper/providers/aws/exceptions.py +5 -0
- dbt_platform_helper/providers/aws/sso_auth.py +14 -0
- dbt_platform_helper/providers/config.py +0 -11
- dbt_platform_helper/providers/ecs.py +104 -11
- dbt_platform_helper/providers/load_balancers.py +119 -14
- dbt_platform_helper/providers/logs.py +57 -0
- dbt_platform_helper/providers/s3.py +21 -0
- dbt_platform_helper/providers/terraform_manifest.py +3 -5
- dbt_platform_helper/providers/yaml_file.py +13 -5
- dbt_platform_helper/utils/application.py +66 -16
- {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/METADATA +1 -1
- {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/RECORD +26 -24
- platform_helper.py +0 -2
- dbt_platform_helper/commands/service.py +0 -53
- {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/WHEEL +0 -0
- {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/entry_points.txt +0 -0
- {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/licenses/LICENSE +0 -0
dbt_platform_helper/domain/service.py
@@ -1,9 +1,12 @@
+import json
 import os
+import time
 from collections import OrderedDict
 from copy import deepcopy
 from datetime import datetime
 from importlib.metadata import version
 from pathlib import Path
+from typing import Any

 from dbt_platform_helper.constants import IMAGE_TAG_ENV_VAR
 from dbt_platform_helper.constants import PLATFORM_HELPER_PACKAGE_NAME
@@ -19,14 +22,16 @@ from dbt_platform_helper.domain.terraform_environment import (
 )
 from dbt_platform_helper.entities.service import ServiceConfig
 from dbt_platform_helper.platform_exception import PlatformException
-from dbt_platform_helper.providers.config import ConfigLoader
 from dbt_platform_helper.providers.config import ConfigProvider
 from dbt_platform_helper.providers.config_validator import ConfigValidator
+from dbt_platform_helper.providers.ecs import ECS
 from dbt_platform_helper.providers.environment_variable import (
     EnvironmentVariableProvider,
 )
 from dbt_platform_helper.providers.files import FileProvider
 from dbt_platform_helper.providers.io import ClickIOProvider
+from dbt_platform_helper.providers.logs import LogsProvider
+from dbt_platform_helper.providers.s3 import S3Provider
 from dbt_platform_helper.providers.terraform_manifest import TerraformManifestProvider
 from dbt_platform_helper.providers.version import InstalledVersionProvider
 from dbt_platform_helper.providers.yaml_file import YamlFileProvider
@@ -34,53 +39,51 @@ from dbt_platform_helper.utils.application import load_application
 from dbt_platform_helper.utils.deep_merge import deep_merge

 SERVICE_TYPES = ["Load Balanced Web Service", "Backend Service"]
-
+DEPLOYMENT_TIMEOUT_SECONDS = 600
+POLL_INTERVAL_SECONDS = 5

 # TODO add schema version to service config
+
+
 class ServiceManager:
     def __init__(
         self,
         config_provider=ConfigProvider(ConfigValidator()),
-        loader: ConfigLoader = ConfigLoader(),
         io: ClickIOProvider = ClickIOProvider(),
         file_provider=YamlFileProvider,
-        environment_variable_provider: EnvironmentVariableProvider = None,
         manifest_provider: TerraformManifestProvider = None,
         platform_helper_version_override: str = None,
         load_application=load_application,
         installed_version_provider: InstalledVersionProvider = InstalledVersionProvider(),
+        ecs_provider: ECS = None,
+        s3_provider: S3Provider = None,
+        logs_provider: LogsProvider = None,
     ):

         self.file_provider = file_provider
         self.config_provider = config_provider
-        self.loader = loader
         self.io = io
-        self.environment_variable_provider = (
-            environment_variable_provider or EnvironmentVariableProvider()
-        )
         self.manifest_provider = manifest_provider or TerraformManifestProvider()
         self.platform_helper_version_override = (
             platform_helper_version_override
-            or
+            or EnvironmentVariableProvider.get(PLATFORM_HELPER_VERSION_OVERRIDE_KEY)
         )
         self.load_application = load_application
         self.installed_version_provider = installed_version_provider
+        self.ecs_provider = ecs_provider
+        self.s3_provider = s3_provider
+        self.logs_provider = logs_provider

-    def generate(self,
+    def generate(self, environment: str, services: list[str]):

         config = self.config_provider.get_enriched_config()
         application_name = config.get("application", "")
         application = self.load_application(app=application_name)

-        if not environments:
-
-
-
-        for environment in environments:
-            if environment not in application.environments:
-                raise EnvironmentNotFoundException(
-                    f"cannot generate terraform for environment {environment}. It does not exist in your configuration"
-                )
+        if environment not in application.environments:
+            raise EnvironmentNotFoundException(
+                f"Cannot generate Terraform for environment '{environment}'. It does not exist in your configuration."
+            )

         if not services:
             try:
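Note: the constructor above now takes injectable ECS, S3 and CloudWatch Logs providers. The following is a minimal, illustrative sketch (not part of the package) of supplying those dependencies with test doubles, assuming the class is importable from dbt_platform_helper.domain.service as the file list suggests; the stubs and their intent comments reflect how deploy() uses each provider in this diff.

# Hypothetical usage sketch; MagicMock stands in for the real providers.
from unittest.mock import MagicMock

from dbt_platform_helper.domain.service import ServiceManager

ecs_stub = MagicMock()   # deploy() registers task definitions and updates the ECS service with this
s3_stub = MagicMock()    # deploy() fetches the stored task definition JSON with this
logs_stub = MagicMock()  # deploy() checks that the expected log streams exist with this

manager = ServiceManager(
    ecs_provider=ecs_stub,
    s3_provider=s3_stub,
    logs_provider=logs_stub,
)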
@@ -96,70 +99,71 @@ class ServiceManager:
                 )
             except Exception as e:
                 self.io.abort_with_error(f"Failed extracting services with exception, {e}")
+
         service_models = []
         for service in services:
-
-
-                f"{SERVICE_DIRECTORY}/{service}/{SERVICE_CONFIG_FILE}",
-                ServiceConfig,
-            )
+            file_content = self.file_provider.load(
+                f"{SERVICE_DIRECTORY}/{service}/{SERVICE_CONFIG_FILE}"
             )

+            file_content = YamlFileProvider.find_and_replace(
+                config=file_content,
+                strings=[
+                    "${PLATFORM_APPLICATION_NAME}",
+                    "${PLATFORM_ENVIRONMENT_NAME}",
+                ],
+                replacements=[application.name, environment],
+            )
+            service_models.append(ServiceConfig(**file_content))
+
         platform_helper_version_for_template: str = (
             self.platform_helper_version_override
             or config.get("default_versions", {}).get("platform-helper")
         )

-        source_type =
+        source_type = EnvironmentVariableProvider.get(TERRAFORM_MODULE_SOURCE_TYPE_ENV_VAR)

         if source_type == "LOCAL":
             module_source_override = ServiceConfig.local_terraform_source
         elif source_type == "OVERRIDE":
-            module_source_override =
+            module_source_override = EnvironmentVariableProvider.get(
                 TERRAFORM_ECS_SERVICE_MODULE_SOURCE_OVERRIDE_ENV_VAR
             )
         else:
             module_source_override = None

-        image_tag = image_tag_flag or self.environment_variable_provider.get(IMAGE_TAG_ENV_VAR)
-        if not image_tag:
-            raise PlatformException(
-                f"An image tag must be provided to deploy a service. This can be set by the $IMAGE_TAG environment variable, or the --image-tag flag."
-            )
-
         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

         for service in service_models:

-
-
-
-
-
-
-
-
-
+            model_dump = service.model_dump(
+                exclude_none=True, by_alias=True
+            )  # Use by_alias=True so that the Cooldown field 'in_' is written as 'in' in the output
+            env_overrides = model_dump.get("environments", {}).get(environment)
+            if env_overrides:
+                merged_config = deep_merge(model_dump, env_overrides)
+            else:
+                merged_config = model_dump.copy()
+            merged_config.pop("environments", None)

-
-
-
-
+            output_path = Path(
+                f"terraform/{SERVICE_DIRECTORY}/{environment}/{service.name}/{SERVICE_CONFIG_FILE}"
+            )
+            output_path.parent.mkdir(parents=True, exist_ok=True)

-
-
-
-
-
+            self.file_provider.write(
+                str(output_path),
+                merged_config,
+                f"# WARNING: This is an autogenerated file, not for manual editing.\n# Generated by platform-helper {version('dbt-platform-helper')} / {timestamp}.\n",
+            )

-
-
-
-
-
-
-
-            )
+            self.manifest_provider.generate_service_config(
+                service,
+                environment,
+                platform_helper_version_for_template,
+                config,
+                module_source_override,
+            )

     def migrate_copilot_manifests(self) -> None:
         service_directory = Path("services/")
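Note: the hunk above merges per-environment overrides from the service config into the top-level values before writing the generated file. The following is a rough, standalone illustration of that merge; the config values and the _merge helper are invented for the example, while the real code uses deep_merge from dbt_platform_helper.utils.deep_merge on the Pydantic model dump.

# Illustrative sketch of the environment-override merge in generate().
def _merge(base: dict, overrides: dict) -> dict:
    merged = dict(base)
    for key, value in overrides.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = _merge(merged[key], value)
        else:
            merged[key] = value
    return merged

service_config = {
    "name": "web",
    "cpu": 256,
    "memory": 512,
    "environments": {"prod": {"cpu": 1024, "count": 3}},
}

environment = "prod"
env_overrides = service_config.get("environments", {}).get(environment)
merged = _merge(service_config, env_overrides) if env_overrides else dict(service_config)
merged.pop("environments", None)

print(merged)  # {'name': 'web', 'cpu': 1024, 'memory': 512, 'count': 3}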
@@ -180,20 +184,35 @@ class ServiceManager:
             if "alb" in env_config["http"]:
                 del env_config["http"]["alb"]

-
-            service_manifest,
-
-            "${PLATFORM_APPLICATION_NAME}",
-        )
+        if "entrypoint" in service_manifest:
+            if isinstance(service_manifest["entrypoint"], str):
+                service_manifest["entrypoint"] = [service_manifest["entrypoint"]]

         service_manifest = self.file_provider.find_and_replace(
-            service_manifest,
-            "${COPILOT_ENVIRONMENT_NAME}",
-            "${PLATFORM_ENVIRONMENT_NAME}",
+            config=service_manifest,
+            strings=["${COPILOT_APPLICATION_NAME}", "${COPILOT_ENVIRONMENT_NAME}"],
+            replacements=["${PLATFORM_APPLICATION_NAME}", "${PLATFORM_ENVIRONMENT_NAME}"],
         )

         service_manifest = self.file_provider.remove_empty_keys(service_manifest)

+        if "sidecars" in service_manifest:
+            new_sidecars = {}
+            writable_directories = []
+
+            for sidecar_name, sidecar in service_manifest["sidecars"].items():
+                if "chown" not in sidecar.get("command", "") and "chmod" not in sidecar.get(
+                    "command", ""
+                ):
+                    new_sidecars[sidecar_name] = sidecar
+                if "chown" in sidecar.get("command", "") and "mount_points" in sidecar:
+                    for mountpoint in sidecar["mount_points"]:
+                        writable_directories.append(mountpoint["path"])
+
+            service_manifest["sidecars"] = new_sidecars
+            if "storage" in service_manifest:
+                service_manifest["storage"]["writable_directories"] = writable_directories
+
         service_path = service_directory / service_manifest["name"]

         self.io.info(
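Note: the sidecar handling added above drops sidecars whose command runs chown or chmod and records their mount point paths as storage.writable_directories. The following is a small, self-contained sketch of that behaviour on a made-up Copilot manifest; the manifest contents are invented, and the filtering logic mirrors the diff.

# Illustrative sketch of the sidecar migration in migrate_copilot_manifests().
service_manifest = {
    "name": "web",
    "storage": {"volumes": {"data": {"path": "/app/data"}}},
    "sidecars": {
        "nginx": {"image": "nginx:latest", "port": 443},
        "fix-perms": {
            "image": "busybox",
            "command": "chown -R app:app /app/data",
            "mount_points": [{"path": "/app/data"}],
        },
    },
}

new_sidecars = {}
writable_directories = []
for sidecar_name, sidecar in service_manifest["sidecars"].items():
    command = sidecar.get("command", "")
    if "chown" not in command and "chmod" not in command:
        new_sidecars[sidecar_name] = sidecar
    if "chown" in command and "mount_points" in sidecar:
        for mountpoint in sidecar["mount_points"]:
            writable_directories.append(mountpoint["path"])

service_manifest["sidecars"] = new_sidecars
if "storage" in service_manifest:
    service_manifest["storage"]["writable_directories"] = writable_directories

print(list(service_manifest["sidecars"]))                    # ['nginx']
print(service_manifest["storage"]["writable_directories"])   # ['/app/data']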
@@ -211,3 +230,192 @@ class ServiceManager:
         self.file_provider.write(
             f"{service_path}/service-config.yml", dict(service_manifest), message
         )
+
+    def deploy(
+        self,
+        service: str,
+        environment: str,
+        application: str,
+        account_id: str,
+        image_tag: str = None,
+    ):
+        """Register a new ECS task definition revision, update the ECS service
+        with it, output a Cloudwatch logs URL, and wait until deployment is
+        complete."""
+
+        s3_response = self.s3_provider.get_object(
+            bucket_name=f"ecs-task-definitions-{application}-{environment}",
+            object_key=f"{application}/{environment}/{service}.json",
+        )
+
+        task_definition = json.loads(s3_response)
+
+        image_tag = image_tag or EnvironmentVariableProvider.get(IMAGE_TAG_ENV_VAR)
+
+        task_def_arn = self.ecs_provider.register_task_definition(
+            service=service,
+            image_tag=image_tag,
+            task_definition=task_definition,
+        )
+
+        self.io.info(f"Task definition successfully registered with ARN '{task_def_arn}'.\n")
+
+        service_response = self.ecs_provider.update_service(
+            service=service,
+            task_def_arn=task_def_arn,
+            environment=environment,
+            application=application,
+        )
+
+        self.io.info(f"Successfully updated ECS service '{service_response['serviceName']}'.\n")
+
+        primary_deployment_id = self._get_primary_deployment_id(service_response=service_response)
+        self.io.info(f"New ECS Deployment with ID '{primary_deployment_id}' has been triggered.\n")
+
+        expected_count = service_response.get("desiredCount", 1)
+        task_ids = self._fetch_ecs_task_ids(
+            application=application,
+            environment=environment,
+            deployment_id=primary_deployment_id,
+            expected_count=expected_count,
+        )
+
+        self.io.info(
+            f"Detected {len(task_ids)} new ECS task(s) with the following ID(s) {task_ids}.\n"
+        )
+
+        container_names = self.ecs_provider.get_container_names_from_ecs_tasks(
+            cluster_name=f"{application}-{environment}-cluster",
+            task_ids=task_ids,
+        )
+
+        log_streams = self._build_log_stream_names(
+            task_ids=task_ids, container_names=container_names, stream_prefix="platform"
+        )
+
+        log_group = f"/platform/ecs/service/{application}/{environment}/{service}"
+        self.logs_provider.check_log_streams_present(
+            log_group=log_group, expected_log_streams=log_streams
+        )
+
+        cloudwatch_url = self._build_cloudwatch_live_tail_url(
+            account_id=account_id, log_group=log_group, log_streams=log_streams
+        )
+        self.io.info(f"View real-time deployment logs in the AWS Console: \n{cloudwatch_url}\n")
+
+        self._monitor_ecs_deployment(
+            application=application,
+            environment=environment,
+            service=service,
+        )
+
+    @staticmethod
+    def _build_cloudwatch_live_tail_url(
+        account_id: str, log_group: str, log_streams: list[str]
+    ) -> str:
+        """Build CloudWatch live tail URL with log group and log streams pre-
+        populated in Rison format."""
+
+        log_group_rison = log_group.replace("/", "*2f")
+
+        delimiter = "~'"
+        log_streams_rison = ""
+        for stream in log_streams:
+            stream_rison = stream.replace("/", "*2f")
+            log_streams_rison = log_streams_rison + f"{delimiter}{stream_rison}"
+
+        base = "https://eu-west-2.console.aws.amazon.com/cloudwatch/home?region=eu-west-2#logsV2:live-tail"
+        log_group_fragment = f"$3FlogGroupArns$3D~(~'arn*3aaws*3alogs*3aeu-west-2*3a{account_id}*3alog-group*3a{log_group_rison}*3a*2a)"
+        log_streams_fragment = f"$26logStreamNames$3D~({log_streams_rison})"
+
+        return base + log_group_fragment + log_streams_fragment
+
+    @staticmethod
+    def _build_log_stream_names(
+        task_ids: list[str], container_names: list[str], stream_prefix: str
+    ) -> list[str]:
+        """Manually build names of the log stream that will get created."""
+
+        log_streams = []
+        for id in task_ids:
+            for name in container_names:
+                if not name.startswith(
+                    "ecs-service-connect"
+                ):  # ECS Service Connect container logs are noisy and not relevant in most cases
+                    log_streams.append(f"{stream_prefix}/{name}/{id}")
+
+        return log_streams
+
+    @staticmethod
+    def _get_primary_deployment_id(service_response: dict[str, Any]):
+        for dep in service_response["deployments"]:
+            if dep["status"] == "PRIMARY":
+                return dep["id"]
+        raise PlatformException(
+            f"\nUnable to find primary ECS deployment for service '{service_response['serviceName']}'\n"
+        )
+
+    def _fetch_ecs_task_ids(
+        self, application: str, environment: str, deployment_id: str, expected_count: int
+    ) -> list[str]:
+        """Return ECS task ID(s) of tasks started by the PRIMARY ECS
+        deployment."""
+
+        timeout_seconds = DEPLOYMENT_TIMEOUT_SECONDS
+        deadline = time.monotonic() + timeout_seconds  # 10 minute deadline before timing out
+
+        self.io.info(f"Waiting for the new ECS task(s) to spin up.\n")
+
+        while time.monotonic() < deadline:
+            task_arns = self.ecs_provider.get_ecs_task_arns(
+                cluster=f"{application}-{environment}-cluster",
+                started_by=deployment_id,
+                desired_status="RUNNING",
+            )
+
+            if len(task_arns) >= expected_count:
+                break
+
+            time.sleep(POLL_INTERVAL_SECONDS)
+
+        if len(task_arns) < expected_count:
+            raise PlatformException(
+                f"Timed out waiting for {expected_count} RUNNING ECS task(s) to spin up after {timeout_seconds}s. Got {len(task_arns)} instead."
+            )
+
+        task_ids = []
+        for arn in task_arns:
+            task_ids.append(arn.rsplit("/", 1)[-1])
+        return task_ids
+
+    def _monitor_ecs_deployment(self, application: str, environment: str, service: str) -> bool:
+        """Loop until ECS rollout state is SUCCESSFUL or a fail status or else
+        times out."""
+
+        cluster_name = f"{application}-{environment}-cluster"
+        ecs_service_name = f"{application}-{environment}-{service}"
+        start_time = time.time()
+        timeout_seconds = DEPLOYMENT_TIMEOUT_SECONDS
+        deadline = time.monotonic() + timeout_seconds  # 10 minute deadline before timing out
+
+        while time.monotonic() < deadline:
+            try:
+                state, reason = self.ecs_provider.get_service_rollout_state(
+                    cluster_name=cluster_name, service_name=ecs_service_name, start_time=start_time
+                )
+            except Exception as e:
+                raise PlatformException(f"Failed to fetch ECS rollout state: {e}")
+
+            if state == "SUCCESSFUL":
+                self.io.info("\nECS deployment complete!")
+                return True
+            if state in ["STOPPED", "ROLLBACK_SUCCESSFUL", "ROLLBACK_FAILED"]:
+                raise PlatformException(f"\nECS deployment failed: {reason or 'unknown reason'}")
+
+            elapsed_time = int(time.time() - start_time)
+            self.io.info(f"Deployment in progress {elapsed_time}s")
+            time.sleep(POLL_INTERVAL_SECONDS)
+
+        raise PlatformException(
+            f"Timed out after {timeout_seconds}s waiting for '{ecs_service_name}' to stabilise."
+        )