dbt-platform-helper 15.10.0__py3-none-any.whl → 15.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbt-platform-helper might be problematic.

Files changed (27)
  1. dbt_platform_helper/COMMANDS.md +0 -91
  2. dbt_platform_helper/commands/internal.py +114 -0
  3. dbt_platform_helper/constants.py +17 -0
  4. dbt_platform_helper/domain/conduit.py +13 -5
  5. dbt_platform_helper/domain/config.py +30 -1
  6. dbt_platform_helper/domain/maintenance_page.py +10 -8
  7. dbt_platform_helper/domain/service.py +274 -66
  8. dbt_platform_helper/domain/update_alb_rules.py +346 -0
  9. dbt_platform_helper/entities/platform_config_schema.py +0 -3
  10. dbt_platform_helper/entities/service.py +139 -13
  11. dbt_platform_helper/providers/aws/exceptions.py +5 -0
  12. dbt_platform_helper/providers/aws/sso_auth.py +14 -0
  13. dbt_platform_helper/providers/config.py +0 -11
  14. dbt_platform_helper/providers/ecs.py +104 -11
  15. dbt_platform_helper/providers/load_balancers.py +119 -14
  16. dbt_platform_helper/providers/logs.py +57 -0
  17. dbt_platform_helper/providers/s3.py +21 -0
  18. dbt_platform_helper/providers/terraform_manifest.py +3 -5
  19. dbt_platform_helper/providers/yaml_file.py +13 -5
  20. dbt_platform_helper/utils/application.py +66 -16
  21. {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/METADATA +1 -1
  22. {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/RECORD +26 -24
  23. platform_helper.py +0 -2
  24. dbt_platform_helper/commands/service.py +0 -53
  25. {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/WHEEL +0 -0
  26. {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/entry_points.txt +0 -0
  27. {dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/licenses/LICENSE +0 -0

dbt_platform_helper/providers/ecs.py
@@ -1,7 +1,10 @@
  import random
  import string
  import subprocess
- from typing import List
+ from typing import Any
+ from typing import Optional
+
+ from botocore.exceptions import ClientError

  from dbt_platform_helper.platform_exception import PlatformException
  from dbt_platform_helper.platform_exception import ValidationException
@@ -39,7 +42,7 @@ class ECS:
          container_name: str,
          task_def_arn: str,
          vpc_config: Vpc,
-         env_vars: List[dict] = None,
+         env_vars: list[dict] = None,
      ):
          container_override = {"name": container_name}
          if env_vars:
@@ -107,13 +110,31 @@ class ECS:
          random_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=12))
          return f"conduit-{self.application_name}-{self.env}-{addon_name}-{random_id}"

-     def get_ecs_task_arns(self, cluster_arn: str, task_def_family: str):
-         """Gets the ECS task ARNs for a given task name and cluster ARN."""
-         tasks = self.ecs_client.list_tasks(
-             cluster=cluster_arn,
-             desiredStatus="RUNNING",
-             family=task_def_family,
-         )
+     def get_ecs_task_arns(
+         self,
+         cluster: str,
+         max_results: int = 100,
+         desired_status: str = "RUNNING",
+         service_name: Optional[str] = None,
+         started_by: Optional[str] = None,
+         task_def_family: Optional[str] = None,
+     ) -> list[str]:
+         """Returns the ECS task ARNs based on the parameters provided."""
+
+         params = {
+             "cluster": cluster,
+             "maxResults": max_results,
+             "desiredStatus": desired_status,
+         }
+
+         if service_name:
+             params["serviceName"] = service_name
+         if started_by:
+             params["startedBy"] = started_by
+         if task_def_family:
+             params["family"] = task_def_family
+
+         tasks = self.ecs_client.list_tasks(**params)

          if not tasks["taskArns"]:
              return []
@@ -137,7 +158,7 @@ class ECS:
          exceptions_to_catch=(ECSException,),
          message_on_false="ECS Agent Not running",
      )
-     def ecs_exec_is_available(self, cluster_arn: str, task_arns: List[str]) -> bool:
+     def ecs_exec_is_available(self, cluster_arn: str, task_arns: list[str]) -> bool:
          """
          Checks if the ExecuteCommandAgent is running on the specified ECS task.

@@ -168,7 +189,79 @@ class ECS:
          message_on_false="ECS task did not register in time",
      )
      def wait_for_task_to_register(self, cluster_arn: str, task_family: str) -> list[str]:
-         task_arns = self.get_ecs_task_arns(cluster_arn, task_family)
+         task_arns = self.get_ecs_task_arns(cluster=cluster_arn, task_def_family=task_family)
          if task_arns:
              return task_arns
          return False
+
+     def get_service_rollout_state(
+         self, cluster_name: str, service_name: str, start_time: float
+     ) -> tuple[Optional[str], Optional[str]]:
+         """
+         Returns status & statusReason for the deployment of an ECS service.
+
+         statusReason can be:
+         PENDING | SUCCESSFUL | STOPPED | STOP_REQUESTED |
+         IN_PROGRESS | ROLLBACK_REQUESTED | ROLLBACK_IN_PROGRESS |
+         ROLLBACK_SUCCESSFUL | ROLLBACK_FAILED
+         """
+         resp = self.ecs_client.list_service_deployments(
+             cluster=cluster_name, service=service_name, createdAt={"after": start_time - 180}
+         )
+         deployments = resp.get("serviceDeployments", [])
+
+         if not deployments:
+             return None, f"No deployments found for '{service_name}'"
+
+         return deployments[0].get("status"), deployments[0].get("statusReason")
+
+     def get_container_names_from_ecs_tasks(
+         self, cluster_name: str, task_ids: list[str]
+     ) -> list[str]:
+         """Retrieve container names from each ECS task provided."""
+
+         response = self.ecs_client.describe_tasks(cluster=cluster_name, tasks=task_ids)
+
+         names = []
+         for task in response.get("tasks", []):
+             for container in task.get("containers", []):
+                 if container["name"] not in names:
+                     names.append(container["name"])
+         return names
+
+     def register_task_definition(
+         self,
+         service: str,
+         task_definition: dict,
+         image_tag: Optional[str] = None,
+     ) -> str:
+         """Register a new task definition revision using provided model and
+         containerDefinitions."""
+
+         if image_tag:
+             for container in task_definition["containerDefinitions"]:
+                 if container["name"] == service:
+                     image_uri = container["image"].rsplit(":", 1)[0]
+                     container["image"] = f"{image_uri}:{image_tag}"
+                     break
+
+         try:
+             task_definition_response = self.ecs_client.register_task_definition(**task_definition)
+             return task_definition_response["taskDefinition"]["taskDefinitionArn"]
+         except ClientError as err:
+             raise PlatformException(f"Error registering task definition: {err}")
+
+     def update_service(
+         self, service: str, task_def_arn: str, environment: str, application: str
+     ) -> dict[str, Any]:
+         """Update an ECS service and return the response."""
+
+         try:
+             service_response = self.ecs_client.update_service(
+                 cluster=f"{application}-{environment}-cluster",
+                 service=f"{application}-{environment}-{service}",
+                 taskDefinition=task_def_arn,
+             )
+             return service_response["service"]
+         except ClientError as err:
+             raise PlatformException(f"Error updating ECS service: {err}")

dbt_platform_helper/providers/load_balancers.py
@@ -1,7 +1,14 @@
+ import json
+ from typing import Dict
+ from typing import List
+
  from boto3 import Session

+ from dbt_platform_helper.constants import MANAGED_BY_PLATFORM_TERRAFORM
+ from dbt_platform_helper.constants import ROUTED_TO_PLATFORM_MODES
  from dbt_platform_helper.platform_exception import PlatformException
  from dbt_platform_helper.providers.io import ClickIOProvider
+ from dbt_platform_helper.providers.parameter_store import ParameterStore
  from dbt_platform_helper.utils.aws import get_aws_session_or_abort


@@ -12,12 +19,24 @@ def normalise_to_cidr(ip: str):
      return f"{ip}/{SINGLE_IPV4_CIDR_PREFIX_LENGTH}"


+ class ALBDataNormaliser:
+
+     @staticmethod
+     def tags_to_dict(tags: List[Dict[str, str]]) -> Dict[str, str]:
+         return {tag.get("Key", ""): tag.get("Value", "") for tag in tags}
+
+     @staticmethod
+     def conditions_to_dict(conditions: List[Dict[str, List[str]]]) -> Dict[str, List[str]]:
+         return {condition.get("Field", ""): condition.get("Values", "") for condition in conditions}
+
+
  class LoadBalancerProvider:

      def __init__(self, session: Session = None, io: ClickIOProvider = ClickIOProvider()):
          self.session = session
          self.evlb_client = self._get_client("elbv2")
          self.rg_tagging_client = self._get_client("resourcegroupstaggingapi")
+         self.parameter_store_provider = ParameterStore(self._get_client("ssm"))
          self.io = io

      def _get_client(self, client: str):
@@ -26,21 +45,44 @@ class LoadBalancerProvider:
          return self.session.client(client)

      def find_target_group(self, app: str, env: str, svc: str) -> str:
+
+         # TODO once copilot is gone this is no longer needed
+         try:
+             result = self.parameter_store_provider.get_ssm_parameter_by_name(
+                 f"/platform/applications/{app}/environments/{env}"
+             )["Value"]
+             env_config = json.loads(result)
+             service_deployment_mode = env_config["service_deployment_mode"]
+         except Exception:
+             service_deployment_mode = "copilot"
+
+         if service_deployment_mode in ROUTED_TO_PLATFORM_MODES:
+             application_key = "application"
+             environment_key = "environment"
+             service_key = "service"
+         else:
+             application_key = "copilot-application"
+             environment_key = "copilot-environment"
+             service_key = "copilot-service"
          target_group_arn = None

          paginator = self.rg_tagging_client.get_paginator("get_resources")
          page_iterator = paginator.paginate(
              TagFilters=[
                  {
-                     "Key": "copilot-application",
+                     "Key": application_key,
                      "Values": [
                          app,
                      ],
-                     "Key": "copilot-environment",
+                 },
+                 {
+                     "Key": environment_key,
                      "Values": [
                          env,
                      ],
-                     "Key": "copilot-service",
+                 },
+                 {
+                     "Key": service_key,
                      "Values": [
                          svc,
                      ],
@@ -56,9 +98,9 @@ class LoadBalancerProvider:
              tags = {tag["Key"]: tag["Value"] for tag in resource["Tags"]}

              if (
-                 tags.get("copilot-service") == svc
-                 and tags.get("copilot-environment") == env
-                 and tags.get("copilot-application") == app
+                 tags.get(service_key) == svc
+                 and tags.get(environment_key) == env
+                 and tags.get(application_key) == app
              ):
                  target_group_arn = resource["ResourceARN"]

@@ -69,6 +111,29 @@ class LoadBalancerProvider:

          return target_group_arn

+     def get_target_groups(self, target_group_arns: List[str]) -> List[dict]:
+         tgs = []
+         paginator = self.evlb_client.get_paginator("describe_target_groups")
+         page_iterator = paginator.paginate(TargetGroupArns=target_group_arns)
+         for page in page_iterator:
+             tgs.extend(page["TargetGroups"])
+
+         return tgs
+
+     def get_target_groups_with_tags(
+         self, target_group_arns: List[str], normalise: bool = True
+     ) -> List[dict]:
+         target_groups = self.get_target_groups(target_group_arns)
+
+         tags = self.get_resources_tag_descriptions(target_groups, "TargetGroupArn")
+
+         tgs_with_tags = self.merge_in_tags_by_resource_arn(target_groups, tags, "TargetGroupArn")
+
+         if normalise:
+             for tg in tgs_with_tags:
+                 tg["Tags"] = ALBDataNormaliser.tags_to_dict(tg["Tags"])
+         return tgs_with_tags
+
      def get_https_certificate_for_listener(self, listener_arn: str, env: str):
          certificates = []
          paginator = self.evlb_client.get_paginator("describe_listener_certificates")
@@ -87,7 +152,7 @@ class LoadBalancerProvider:
          listener_arn = self.get_https_listener_for_application(app, env)
          return self.get_https_certificate_for_listener(listener_arn, env)

-     def get_listeners_for_load_balancer(self, load_balancer_arn):
+     def get_listeners_for_load_balancer(self, load_balancer_arn: str) -> List[dict]:
          listeners = []
          paginator = self.evlb_client.get_paginator("describe_listeners")
          page_iterator = paginator.paginate(LoadBalancerArn=load_balancer_arn)
@@ -98,6 +163,7 @@ class LoadBalancerProvider:

      def get_https_listener_for_application(self, app: str, env: str) -> str:
          load_balancer_arn = self.get_load_balancer_for_application(app, env)
+         self.io.debug(f"Load Balancer ARN: {load_balancer_arn}")
          listeners = self.get_listeners_for_load_balancer(load_balancer_arn)

          listener_arn = None
@@ -112,7 +178,7 @@ class LoadBalancerProvider:

          return listener_arn

-     def get_load_balancers(self):
+     def get_load_balancers(self) -> List[dict]:
          load_balancers = []
          paginator = self.evlb_client.get_paginator("describe_load_balancers")
          page_iterator = paginator.paginate()
@@ -133,7 +199,11 @@ class LoadBalancerProvider:
          for lb in tag_descriptions:
              tags = {t["Key"]: t["Value"] for t in lb["Tags"]}
              # TODO: DBTP-1967: copilot hangover, creates coupling to specific tags could update to check application and environment
-             if tags.get("copilot-application") == app and tags.get("copilot-environment") == env:
+             if (
+                 tags.get("copilot-application") == app
+                 and tags.get("copilot-environment") == env
+                 and tags.get("managed-by", "") == MANAGED_BY_PLATFORM_TERRAFORM
+             ):
                  return lb["ResourceArn"]

          raise LoadBalancerNotFoundException(app, env)
@@ -173,7 +243,37 @@ class LoadBalancerProvider:
      def get_rules_tag_descriptions_by_listener_arn(self, listener_arn: str) -> list:
          rules = self.get_listener_rules_by_listener_arn(listener_arn)

-         return self.get_rules_tag_descriptions(rules)
+         return self.get_resources_tag_descriptions(rules)
+
+     def merge_in_tags_by_resource_arn(
+         self,
+         resources: List[dict],
+         tag_descriptions: List[dict],
+         resources_identifier: str = "RuleArn",
+     ):
+         tags_by_resource_arn = {
+             rule_tags.get("ResourceArn"): rule_tags for rule_tags in tag_descriptions if rule_tags
+         }
+         for resource in resources:
+             tags = tags_by_resource_arn[resource[resources_identifier]]
+             resource.update(tags)
+         return resources
+
+     def get_rules_with_tags_by_listener_arn(
+         self, listener_arn: str, normalise: bool = True
+     ) -> list:
+         rules = self.get_listener_rules_by_listener_arn(listener_arn)
+
+         tags = self.get_resources_tag_descriptions(rules)
+
+         rules_with_tags = self.merge_in_tags_by_resource_arn(rules, tags)
+
+         if normalise:
+             for rule in rules_with_tags:
+                 rule["Conditions"] = ALBDataNormaliser.conditions_to_dict(rule["Conditions"])
+                 rule["Tags"] = ALBDataNormaliser.tags_to_dict(rule["Tags"])
+
+         return rules_with_tags

      def get_listener_rules_by_listener_arn(self, listener_arn: str) -> list:
          rules = []
@@ -184,13 +284,15 @@ class LoadBalancerProvider:

          return rules

-     def get_rules_tag_descriptions(self, rules: list) -> list:
+     def get_resources_tag_descriptions(
+         self, resources: list, resource_identifier: str = "RuleArn"
+     ) -> list:
          tag_descriptions = []
          chunk_size = 20

-         for i in range(0, len(rules), chunk_size):
-             chunk = rules[i : i + chunk_size]
-             resource_arns = [r["RuleArn"] for r in chunk]
+         for i in range(0, len(resources), chunk_size):
+             chunk = resources[i : i + chunk_size]
+             resource_arns = [r[resource_identifier] for r in chunk]
              response = self.evlb_client.describe_tags(
                  ResourceArns=resource_arns
              )  # describe_tags cannot be paginated - 04/04/2025
@@ -305,6 +407,9 @@ class LoadBalancerProvider:

          return deleted_rules

+     def delete_listener_rule_by_resource_arn(self, resource_arn: str) -> list:
+         return self.evlb_client.delete_rule(RuleArn=resource_arn)
+

  class LoadBalancerException(PlatformException):
      pass
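
For reference, the new normalisation helpers convert raw elbv2 tag and condition shapes into plain dicts; a small sketch with invented sample data:

from dbt_platform_helper.providers.load_balancers import ALBDataNormaliser

# Raw shapes as returned by the elbv2 API (values are made up for illustration).
raw_tags = [
    {"Key": "application", "Value": "demo-app"},
    {"Key": "environment", "Value": "dev"},
    {"Key": "service", "Value": "web"},
]
raw_conditions = [
    {"Field": "host-header", "Values": ["web.dev.example.com"]},
    {"Field": "path-pattern", "Values": ["/*"]},
]

print(ALBDataNormaliser.tags_to_dict(raw_tags))
# {'application': 'demo-app', 'environment': 'dev', 'service': 'web'}
print(ALBDataNormaliser.conditions_to_dict(raw_conditions))
# {'host-header': ['web.dev.example.com'], 'path-pattern': ['/*']}

This is the same reshaping that get_rules_with_tags_by_listener_arn and get_target_groups_with_tags apply when called with normalise=True.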

dbt_platform_helper/providers/logs.py
@@ -0,0 +1,57 @@
+ import time
+
+ import boto3
+ from botocore.exceptions import ClientError
+
+ from dbt_platform_helper.platform_exception import PlatformException
+
+
+ class LogsProvider:
+
+     def __init__(self, client: boto3.client):
+         self.client = client
+
+     def check_log_streams_present(self, log_group: str, expected_log_streams: list[str]) -> bool:
+         """
+         Check whether the logs streams provided exist or not.
+
+         Retry for up to 5 minutes.
+         """
+
+         found_log_streams = set()
+         expected_log_streams = set(expected_log_streams)
+         timeout_seconds = 300
+         poll_interval_seconds = 2
+         deadline_seconds = time.monotonic() + timeout_seconds
+
+         while time.monotonic() < deadline_seconds:
+
+             remaining_log_streams = expected_log_streams - found_log_streams
+             if not remaining_log_streams:
+                 return True
+
+             for log_stream in list(remaining_log_streams):
+                 try:
+                     response = self.client.describe_log_streams(
+                         logGroupName=log_group, logStreamNamePrefix=log_stream, limit=1
+                     )
+                 except ClientError as e:
+                     code = e.response.get("Error", {}).get("Code")
+                     if code == "ResourceNotFoundException":
+                         continue  # Log stream not there yet, keep going
+                     else:
+                         raise PlatformException(
+                             f"Failed to check if log stream '{log_stream}' exists due to an error {e}"
+                         )
+
+                 for ls in response.get("logStreams", []):
+                     if ls.get("logStreamName") == log_stream:
+                         found_log_streams.add(log_stream)
+
+             if expected_log_streams - found_log_streams:
+                 time.sleep(poll_interval_seconds)
+
+         missing_log_streams = expected_log_streams - found_log_streams
+         raise PlatformException(
+             f"Timed out waiting for the following log streams to create: {missing_log_streams}"
+         )
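
A sketch of driving the new LogsProvider; the log group and stream names below are placeholders:

import boto3

from dbt_platform_helper.providers.logs import LogsProvider

logs = LogsProvider(boto3.client("logs"))

# Polls every two seconds for up to five minutes; returns True once every stream is
# found, otherwise raises a PlatformException naming the streams that never appeared.
logs.check_log_streams_present(
    log_group="/platform/demo-app/dev/web",
    expected_log_streams=["ecs/web/0123456789abcdef", "ecs/datadog/0123456789abcdef"],
)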

dbt_platform_helper/providers/s3.py
@@ -0,0 +1,21 @@
+ import boto3
+ from botocore.exceptions import ClientError
+
+ from dbt_platform_helper.platform_exception import PlatformException
+
+
+ class S3Provider:
+
+     def __init__(self, client: boto3.client):
+         self.client = client
+
+     def get_object(self, bucket_name: str, object_key: str) -> str:
+         """Returns an object from an S3 bucket."""
+
+         try:
+             content = self.client.get_object(Bucket=bucket_name, Key=object_key)
+             return content["Body"].read().decode("utf-8")
+         except ClientError as e:
+             raise PlatformException(
+                 f"Failed to get '{object_key}' from '{bucket_name}'. Error: {e}"
+             )
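
Usage of the new S3Provider is a one-liner; the bucket and key here are placeholders:

import boto3

from dbt_platform_helper.providers.s3 import S3Provider

s3 = S3Provider(boto3.client("s3"))
config_text = s3.get_object(bucket_name="demo-app-artifacts", object_key="dev/service-config.yml")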

dbt_platform_helper/providers/terraform_manifest.py
@@ -21,7 +21,6 @@ class TerraformManifestProvider:
          self,
          config_object,
          environment,
-         image_tag,
          platform_helper_version: str,
          platform_config,
          module_source_override: str = None,
@@ -36,7 +35,7 @@ class TerraformManifestProvider:
          terraform = {}
          self._add_header(terraform)

-         self._add_service_locals(terraform, environment, image_tag)
+         self._add_service_locals(terraform, environment)

          self._add_provider(terraform, account, deploy_to_account_id)
          self._add_backend(
@@ -47,15 +46,14 @@ class TerraformManifestProvider:

          self._write_terraform_json(terraform, service_dir)

-     def _add_service_locals(self, terraform, environment, image_tag):
+     def _add_service_locals(self, terraform, environment):
          terraform["locals"] = {
              "environment": environment,
-             "image_tag": image_tag,
              "platform_config": '${yamldecode(file("../../../../platform-config.yml"))}',
              "application": '${local.platform_config["application"]}',
              "environments": '${local.platform_config["environments"]}',
              "env_config": '${{for name, config in local.environments: name => merge(lookup(local.environments, "*", {}), config)}}',
-             "service_config": '${yamldecode(templatefile("./service-config.yml", {PLATFORM_ENVIRONMENT_NAME = local.environment, IMAGE_TAG = local.image_tag}))}',
+             "service_config": '${yamldecode(file("./service-config.yml"))}',
              "raw_env_config": '${local.platform_config["environments"]}',
              "combined_env_config": '${{for name, config in local.raw_env_config: name => merge(lookup(local.raw_env_config, "*", {}), config)}}',
              "service_deployment_mode": '${lookup(local.combined_env_config[local.environment], "service-deployment-mode", "copilot")}',

dbt_platform_helper/providers/yaml_file.py
@@ -98,18 +98,26 @@ class YamlFileProvider:
          return cleaned

      @staticmethod
-     def find_and_replace(config, string: str, replacement: str):
+     def find_and_replace(config, strings: list, replacements: list):
+         if len(strings) != len(replacements):
+             raise ValueError("'strings' and 'replacements' must be the same length.")
+         if not isinstance(strings, list) or not isinstance(replacements, list):
+             raise ValueError("'strings' and 'replacements' must both be lists.")
          if isinstance(config, (dict, OrderedDict)):
              return {
-                 k: YamlFileProvider.find_and_replace(v, string, replacement)
+                 k: YamlFileProvider.find_and_replace(v, strings, replacements)
                  for k, v in config.items()
              }
          elif isinstance(config, list):
-             return [YamlFileProvider.find_and_replace(item, string, replacement) for item in config]
+             return [
+                 YamlFileProvider.find_and_replace(item, strings, replacements) for item in config
+             ]
          elif isinstance(config, str):
-             return config.replace(string, replacement)
+             for s, r in zip(strings, replacements):
+                 config = config.replace(s, r)
+             return config
          else:
-             return replacement if config == string else config
+             return replacements if config == strings else config


  def account_number_representer(dumper, data):
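
The reworked find_and_replace walks a nested config with parallel lists of search strings and replacements. A quick illustration with invented placeholder tokens:

from dbt_platform_helper.providers.yaml_file import YamlFileProvider

config = {
    "name": "web",
    "image": "repo/web:${IMAGE_TAG}",
    "env": ["PLATFORM_ENVIRONMENT_NAME=${ENV}", "DEBUG=false"],
}

result = YamlFileProvider.find_and_replace(
    config,
    strings=["${IMAGE_TAG}", "${ENV}"],
    replacements=["abc1234", "dev"],
)
# {'name': 'web', 'image': 'repo/web:abc1234',
#  'env': ['PLATFORM_ENVIRONMENT_NAME=dev', 'DEBUG=false']}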

dbt_platform_helper/utils/application.py
@@ -74,6 +74,7 @@ def load_application(app=None, default_session=None, env=None) -> Application:
          nesting.

          e.g.
+         - /platform/applications/test/environments/my_env will match.
          - /copilot/applications/test/environments/my_env will match.
          - /copilot/applications/test/environments/my_env/addons will not match.
          """
@@ -84,20 +85,24 @@ def load_application(app=None, default_session=None, env=None) -> Application:

      environments_data = []

-     # Try to load the new /platform SSM parameter if present
-     platform_env_path = f"/platform/applications/{application.name}/environments"
-     secrets = get_ssm_secrets(app, None, current_session, platform_env_path)
+     # Try to load all /platform SSM parameters that are present
+     env_params = get_ssm_secrets(
+         app=app,
+         env=None,
+         session=current_session,
+         path=f"/platform/applications/{application.name}/environments",
+     )

-     if secrets:
-         for name, value in secrets:
+     if env_params:
+         for name, value in env_params:
              try:
-                 data = json.loads(value)
+                 param_data = json.loads(value)
              except json.JSONDecodeError:
                  continue

-             # New /platform SSM parameter contains data about all environments
-             if "allEnvironments" in data:
-                 environments_data = data["allEnvironments"]
+             # Each /platform SSM parameter contains data about all the environments of an application
+             if "allEnvironments" in param_data:
+                 environments_data = param_data["allEnvironments"]
                  break  # Only need one
      else:
          try:
@@ -106,19 +111,20 @@ def load_application(app=None, default_session=None, env=None) -> Application:
                  Name=f"/copilot/applications/{application.name}",
                  WithDecryption=False,
              )
-             secrets = get_ssm_secrets(
+
+             # Legacy /copilot SSM parameters for each environment
+             env_params = get_ssm_secrets(
                  app, None, current_session, f"/copilot/applications/{application.name}/environments"
              )

-             for name, value in secrets:
+             for name, value in env_params:
                  try:
-                     data = json.loads(value)
+                     param_data = json.loads(value)
                  except json.JSONDecodeError:
                      continue

                  if is_environment_key(name):
-                     # Legacy /copilot SSM parameter. An individual SSM param is present per environment - looping through all of them is needed to extract necessary data about each env.
-                     environments_data.append(data)
+                     environments_data.append(param_data)

          except ssm_client.exceptions.ParameterNotFound:
              raise ApplicationNotFoundException(
@@ -130,6 +136,50 @@ def load_application(app=None, default_session=None, env=None) -> Application:
          for env in environments_data
      }

+     application.services = _load_services(ssm_client, application)
+
+     return application
+
+
+ def _load_services(ssm_client, application: Application) -> Dict[str, Service]:
+     """
+     Try to load
+     /platform/applications/{app}/environments/{env}/services/{service}
+     parameters if present.
+
+     Otherwise, fall back to legacy /copilot/applications/{app}/components
+     parameters.
+     """
+     services: Dict[str, Service] = {}
+
+     # Try /platform SSM parameter
+     for env_name in application.environments.keys():
+         params = dict(
+             Path=f"/platform/applications/{application.name}/environments/{env_name}/services",
+             Recursive=False,
+             WithDecryption=False,
+         )
+
+         while True:
+             response = ssm_client.get_parameters_by_path(**params)
+             for ssm_param in response.get("Parameters", []):
+                 try:
+                     data = json.loads(ssm_param["Value"])
+                     name = data["name"]
+                     kind = data["type"]
+                     services.setdefault(name, Service(name, kind))  # Avoid duplicates
+                 except (json.JSONDecodeError, KeyError):
+                     continue
+
+             if "NextToken" in response:
+                 params["NextToken"] = response["NextToken"]
+             else:
+                 break
+
+     if services:
+         return services
+
+     # Fallback to legacy /copilot SSM parameter
      response = ssm_client.get_parameters_by_path(
          Path=f"/copilot/applications/{application.name}/components",
          Recursive=False,
@@ -145,12 +195,12 @@ def load_application(app=None, default_session=None, env=None) -> Application:
          )
          results.extend(response["Parameters"])

-     application.services = {
+     legacy_services = {
          svc["name"]: Service(svc["name"], svc["type"])
          for svc in [json.loads(parameter["Value"]) for parameter in results]
      }

-     return application
+     return legacy_services


  def get_application_name(abort=abort_with_error):
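
For context, _load_services expects each /platform service parameter to hold a small JSON document with at least "name" and "type" keys; the parameter path and values below are invented to show the shape:

import json

from dbt_platform_helper.utils.application import load_application

# Hypothetical value of one /platform service parameter read by _load_services, e.g. at
# /platform/applications/demo-app/environments/dev/services/web:
print(json.dumps({"name": "web", "type": "Load Balanced Web Service"}))

# load_application discovers environments first; _load_services then turns each such
# parameter into a Service entry keyed by its name, falling back to the legacy
# /copilot components parameter when none are found.
application = load_application(app="demo-app")
print(sorted(application.services))  # e.g. ['web']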

{dbt_platform_helper-15.10.0.dist-info → dbt_platform_helper-15.12.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dbt-platform-helper
- Version: 15.10.0
+ Version: 15.12.0
  Summary: Set of tools to help transfer applications/services from GOV.UK PaaS to DBT PaaS augmenting AWS Copilot.
  License: MIT
  License-File: LICENSE