apache-airflow-providers-amazon 9.2.0rc1__py3-none-any.whl → 9.3.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (62)
  1. airflow/providers/amazon/LICENSE +0 -52
  2. airflow/providers/amazon/__init__.py +1 -1
  3. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -4
  4. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +90 -106
  5. airflow/providers/amazon/aws/auth_manager/router/login.py +124 -0
  6. airflow/providers/amazon/aws/executors/batch/batch_executor.py +2 -2
  7. airflow/providers/amazon/aws/executors/ecs/boto_schema.py +1 -1
  8. airflow/providers/amazon/aws/executors/ecs/utils.py +2 -1
  9. airflow/providers/amazon/aws/hooks/base_aws.py +6 -1
  10. airflow/providers/amazon/aws/hooks/batch_client.py +1 -2
  11. airflow/providers/amazon/aws/hooks/ecr.py +7 -1
  12. airflow/providers/amazon/aws/hooks/ecs.py +1 -2
  13. airflow/providers/amazon/aws/hooks/eks.py +10 -3
  14. airflow/providers/amazon/aws/hooks/emr.py +20 -0
  15. airflow/providers/amazon/aws/hooks/mwaa.py +85 -0
  16. airflow/providers/amazon/aws/hooks/sqs.py +4 -0
  17. airflow/providers/amazon/aws/hooks/ssm.py +10 -1
  18. airflow/providers/amazon/aws/links/comprehend.py +41 -0
  19. airflow/providers/amazon/aws/links/datasync.py +37 -0
  20. airflow/providers/amazon/aws/links/ec2.py +46 -0
  21. airflow/providers/amazon/aws/links/sagemaker.py +27 -0
  22. airflow/providers/amazon/aws/operators/athena.py +7 -5
  23. airflow/providers/amazon/aws/operators/batch.py +16 -8
  24. airflow/providers/amazon/aws/operators/bedrock.py +20 -18
  25. airflow/providers/amazon/aws/operators/comprehend.py +52 -11
  26. airflow/providers/amazon/aws/operators/datasync.py +40 -2
  27. airflow/providers/amazon/aws/operators/dms.py +0 -4
  28. airflow/providers/amazon/aws/operators/ec2.py +50 -0
  29. airflow/providers/amazon/aws/operators/ecs.py +11 -7
  30. airflow/providers/amazon/aws/operators/eks.py +17 -17
  31. airflow/providers/amazon/aws/operators/emr.py +27 -27
  32. airflow/providers/amazon/aws/operators/glue.py +16 -14
  33. airflow/providers/amazon/aws/operators/glue_crawler.py +3 -3
  34. airflow/providers/amazon/aws/operators/glue_databrew.py +5 -5
  35. airflow/providers/amazon/aws/operators/kinesis_analytics.py +9 -9
  36. airflow/providers/amazon/aws/operators/lambda_function.py +4 -4
  37. airflow/providers/amazon/aws/operators/mwaa.py +109 -0
  38. airflow/providers/amazon/aws/operators/rds.py +16 -16
  39. airflow/providers/amazon/aws/operators/redshift_cluster.py +15 -15
  40. airflow/providers/amazon/aws/operators/redshift_data.py +4 -4
  41. airflow/providers/amazon/aws/operators/sagemaker.py +52 -29
  42. airflow/providers/amazon/aws/operators/sqs.py +6 -0
  43. airflow/providers/amazon/aws/operators/step_function.py +4 -4
  44. airflow/providers/amazon/aws/sensors/ec2.py +3 -3
  45. airflow/providers/amazon/aws/sensors/emr.py +9 -9
  46. airflow/providers/amazon/aws/sensors/glue.py +7 -7
  47. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +3 -3
  48. airflow/providers/amazon/aws/sensors/redshift_cluster.py +3 -3
  49. airflow/providers/amazon/aws/sensors/sqs.py +6 -5
  50. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +8 -3
  51. airflow/providers/amazon/aws/triggers/README.md +1 -1
  52. airflow/providers/amazon/aws/triggers/opensearch_serverless.py +2 -1
  53. airflow/providers/amazon/aws/triggers/sqs.py +2 -1
  54. airflow/providers/amazon/aws/utils/sqs.py +6 -4
  55. airflow/providers/amazon/aws/waiters/dms.json +12 -0
  56. airflow/providers/amazon/get_provider_info.py +106 -87
  57. {apache_airflow_providers_amazon-9.2.0rc1.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/METADATA +18 -36
  58. {apache_airflow_providers_amazon-9.2.0rc1.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/RECORD +61 -55
  59. airflow/providers/amazon/aws/auth_manager/views/auth.py +0 -151
  60. /airflow/providers/amazon/aws/auth_manager/{views → router}/__init__.py +0 -0
  61. {apache_airflow_providers_amazon-9.2.0rc1.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/WHEEL +0 -0
  62. {apache_airflow_providers_amazon-9.2.0rc1.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/entry_points.txt +0 -0
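The hunks below cover only the operator modules; the remaining files in the list changed without being expanded here. To confirm which side of this diff a given environment is on, the installed distribution version can be read directly (a minimal sketch; assumes the provider is already installed):

# Check which of the two compared builds is installed.
from importlib.metadata import version

print(version("apache-airflow-providers-amazon"))  # e.g. "9.2.0rc1" or "9.3.0"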
airflow/providers/amazon/aws/operators/comprehend.py
@@ -23,6 +23,10 @@ from typing import TYPE_CHECKING, Any, ClassVar
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
+from airflow.providers.amazon.aws.links.comprehend import (
+    ComprehendDocumentClassifierLink,
+    ComprehendPiiEntitiesDetectionLink,
+)
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.comprehend import (
     ComprehendCreateDocumentClassifierCompletedTrigger,
@@ -120,6 +124,8 @@ class ComprehendStartPiiEntitiesDetectionJobOperator(ComprehendBaseOperator):
         https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """

+    operator_extra_links = (ComprehendPiiEntitiesDetectionLink(),)
+
     def __init__(
         self,
         input_data_config: dict,
@@ -166,6 +172,19 @@ class ComprehendStartPiiEntitiesDetectionJobOperator(ComprehendBaseOperator):
             **self.start_pii_entities_kwargs,
         )["JobId"]

+        job_url = ComprehendPiiEntitiesDetectionLink.format_str.format(
+            aws_domain=ComprehendPiiEntitiesDetectionLink.get_aws_domain(self.hook.conn_partition),
+            region_name=self.hook.conn_region_name,
+            job_id=job_id,
+        )
+        ComprehendPiiEntitiesDetectionLink.persist(
+            context=context,
+            operator=self,
+            region_name=self.hook.conn_region_name,
+            aws_partition=self.hook.conn_partition,
+            job_id=job_id,
+        )
+        self.log.info("You can view the PII entities detection job at %s", job_url)
         message_description = f"start pii entities detection job {job_id} to complete."
         if self.deferrable:
             self.log.info("Deferring %s", message_description)
@@ -188,12 +207,12 @@ class ComprehendStartPiiEntitiesDetectionJobOperator(ComprehendBaseOperator):
         return job_id

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-        event = validate_execute_complete_event(event)
-        if event["status"] != "success":
-            raise AirflowException("Error while running job: %s", event)
+        validated_event = validate_execute_complete_event(event)
+        if validated_event["status"] != "success":
+            raise AirflowException("Error while running job: %s", validated_event)

-        self.log.info("Comprehend pii entities detection job `%s` complete.", event["job_id"])
-        return event["job_id"]
+        self.log.info("Comprehend pii entities detection job `%s` complete.", validated_event["job_id"])
+        return validated_event["job_id"]


 class ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]):
@@ -238,6 +257,7 @@ class ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]):
     """

     aws_hook_class = ComprehendHook
+    operator_extra_links = (ComprehendDocumentClassifierLink(),)

     template_fields: Sequence[str] = aws_template_fields(
         "document_classifier_name",
@@ -300,6 +320,22 @@ class ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]):
             **self.document_classifier_kwargs,
         )["DocumentClassifierArn"]

+        # create the link to console
+        job_url = ComprehendDocumentClassifierLink.format_str.format(
+            aws_domain=ComprehendDocumentClassifierLink.get_aws_domain(self.hook.conn_partition),
+            region_name=self.hook.conn_region_name,
+            arn=document_classifier_arn,
+        )
+
+        ComprehendDocumentClassifierLink.persist(
+            context=context,
+            operator=self,
+            region_name=self.hook.conn_region_name,
+            aws_partition=self.hook.conn_partition,
+            arn=document_classifier_arn,
+        )
+        self.log.info("You can monitor the classifier at %s", job_url)
+
         message_description = f"document classifier {document_classifier_arn} to complete."
         if self.deferrable:
             self.log.info("Deferring %s", message_description)
@@ -327,14 +363,19 @@ class ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]):
         return document_classifier_arn

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-        event = validate_execute_complete_event(event)
-        if event["status"] != "success":
-            raise AirflowException("Error while running comprehend create document classifier: %s", event)
+        validated_event = validate_execute_complete_event(event)
+        if validated_event["status"] != "success":
+            raise AirflowException(
+                "Error while running comprehend create document classifier: %s", validated_event
+            )

         self.hook.validate_document_classifier_training_status(
-            document_classifier_arn=event["document_classifier_arn"], fail_on_warnings=self.fail_on_warnings
+            document_classifier_arn=validated_event["document_classifier_arn"],
+            fail_on_warnings=self.fail_on_warnings,
         )

-        self.log.info("Comprehend document classifier `%s` complete.", event["document_classifier_arn"])
+        self.log.info(
+            "Comprehend document classifier `%s` complete.", validated_event["document_classifier_arn"]
+        )

-        return event["document_classifier_arn"]
+        return validated_event["document_classifier_arn"]
airflow/providers/amazon/aws/operators/datasync.py
@@ -25,6 +25,7 @@ from typing import TYPE_CHECKING, Any

 from airflow.exceptions import AirflowException, AirflowTaskTimeout
 from airflow.providers.amazon.aws.hooks.datasync import DataSyncHook
+from airflow.providers.amazon.aws.links.datasync import DataSyncTaskExecutionLink, DataSyncTaskLink
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields

@@ -130,6 +131,8 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
     }
     ui_color = "#44b5e2"

+    operator_extra_links = (DataSyncTaskLink(), DataSyncTaskExecutionLink())
+
     def __init__(
         self,
         *,
@@ -215,6 +218,23 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
         if not self.task_arn:
             raise AirflowException("DataSync TaskArn could not be identified or created.")

+        task_id = self.task_arn.split("/")[-1]
+
+        task_url = DataSyncTaskLink.format_str.format(
+            aws_domain=DataSyncTaskLink.get_aws_domain(self.hook.conn_partition),
+            region_name=self.hook.conn_region_name,
+            task_id=task_id,
+        )
+
+        DataSyncTaskLink.persist(
+            context=context,
+            operator=self,
+            region_name=self.hook.conn_region_name,
+            aws_partition=self.hook.conn_partition,
+            task_id=task_id,
+        )
+        self.log.info("You can view this DataSync task at %s", task_url)
+
         self.log.info("Using DataSync TaskArn %s", self.task_arn)

         # Update the DataSync Task
@@ -222,7 +242,7 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
             self._update_datasync_task()

         # Execute the DataSync Task
-        self._execute_datasync_task()
+        self._execute_datasync_task(context=context)

         if not self.task_execution_arn:
             raise AirflowException("Nothing was executed")
@@ -327,7 +347,7 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
         self.hook.update_task(self.task_arn, **self.update_task_kwargs)
         self.log.info("Updated TaskArn %s", self.task_arn)

-    def _execute_datasync_task(self) -> None:
+    def _execute_datasync_task(self, context: Context) -> None:
         """Create and monitor an AWS DataSync TaskExecution for a Task."""
         if not self.task_arn:
             raise AirflowException("Missing TaskArn")
@@ -337,6 +357,24 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
         self.task_execution_arn = self.hook.start_task_execution(self.task_arn, **self.task_execution_kwargs)
         self.log.info("Started TaskExecutionArn %s", self.task_execution_arn)

+        # Create the execution extra link
+        execution_url = DataSyncTaskExecutionLink.format_str.format(
+            aws_domain=DataSyncTaskExecutionLink.get_aws_domain(self.hook.conn_partition),
+            region_name=self.hook.conn_region_name,
+            task_id=self.task_arn.split("/")[-1],
+            task_execution_id=self.task_execution_arn.split("/")[-1],  # type: ignore[union-attr]
+        )
+        DataSyncTaskExecutionLink.persist(
+            context=context,
+            operator=self,
+            region_name=self.hook.conn_region_name,
+            aws_partition=self.hook.conn_partition,
+            task_id=self.task_arn.split("/")[-1],
+            task_execution_id=self.task_execution_arn.split("/")[-1],  # type: ignore[union-attr]
+        )
+
+        self.log.info("You can view this DataSync task execution at %s", execution_url)
+
         if not self.wait_for_completion:
             return

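Because the links are registered on DataSyncOperator through operator_extra_links, existing DAGs pick up the new "task" and "task execution" console links without code changes. A minimal usage sketch, assuming an existing DataSync task and the default AWS connection (the ARN and DAG naming are placeholders):

# Minimal usage sketch: an existing DAG using DataSyncOperator picks up the new
# console links automatically via operator_extra_links. The ARN below is a placeholder.
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.datasync import DataSyncOperator

with DAG(dag_id="datasync_console_links_example", start_date=datetime(2025, 1, 1), schedule=None):
    DataSyncOperator(
        task_id="run_datasync",
        task_arn="arn:aws:datasync:us-east-1:111122223333:task/task-0example",  # placeholder
        wait_for_completion=True,
    )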
airflow/providers/amazon/aws/operators/dms.py
@@ -493,10 +493,6 @@ class DmsDeleteReplicationConfigOperator(AwsBaseOperator[DmsHook]):
             Filters=[{"Name": "replication-config-arn", "Values": [self.replication_config_arn]}],
             WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
         )
-        self.hook.get_waiter("replication_deprovisioned").wait(
-            Filters=[{"Name": "replication-config-arn", "Values": [self.replication_config_arn]}],
-            WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
-        )
         self.hook.delete_replication_config(self.replication_config_arn)
         self.handle_delete_wait()

airflow/providers/amazon/aws/operators/ec2.py
@@ -23,6 +23,10 @@ from typing import TYPE_CHECKING
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
+from airflow.providers.amazon.aws.links.ec2 import (
+    EC2InstanceDashboardLink,
+    EC2InstanceLink,
+)

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -47,6 +51,7 @@ class EC2StartInstanceOperator(BaseOperator):
         between each instance state checks until operation is completed
     """

+    operator_extra_links = (EC2InstanceLink(),)
     template_fields: Sequence[str] = ("instance_id", "region_name")
     ui_color = "#eeaa11"
     ui_fgcolor = "#ffffff"
@@ -71,6 +76,13 @@ class EC2StartInstanceOperator(BaseOperator):
         self.log.info("Starting EC2 instance %s", self.instance_id)
         instance = ec2_hook.get_instance(instance_id=self.instance_id)
         instance.start()
+        EC2InstanceLink.persist(
+            context=context,
+            operator=self,
+            aws_partition=ec2_hook.conn_partition,
+            instance_id=self.instance_id,
+            region_name=ec2_hook.conn_region_name,
+        )
         ec2_hook.wait_for_state(
             instance_id=self.instance_id,
             target_state="running",
@@ -97,6 +109,7 @@ class EC2StopInstanceOperator(BaseOperator):
         between each instance state checks until operation is completed
     """

+    operator_extra_links = (EC2InstanceLink(),)
     template_fields: Sequence[str] = ("instance_id", "region_name")
     ui_color = "#eeaa11"
     ui_fgcolor = "#ffffff"
@@ -120,7 +133,15 @@ class EC2StopInstanceOperator(BaseOperator):
         ec2_hook = EC2Hook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
         self.log.info("Stopping EC2 instance %s", self.instance_id)
         instance = ec2_hook.get_instance(instance_id=self.instance_id)
+        EC2InstanceLink.persist(
+            context=context,
+            operator=self,
+            aws_partition=ec2_hook.conn_partition,
+            instance_id=self.instance_id,
+            region_name=ec2_hook.conn_region_name,
+        )
         instance.stop()
+
         ec2_hook.wait_for_state(
             instance_id=self.instance_id,
             target_state="stopped",
@@ -154,6 +175,7 @@ class EC2CreateInstanceOperator(BaseOperator):
         in the `running` state before returning.
     """

+    operator_extra_links = (EC2InstanceDashboardLink(),)
     template_fields: Sequence[str] = (
         "image_id",
         "max_count",
@@ -198,6 +220,15 @@ class EC2CreateInstanceOperator(BaseOperator):
         )["Instances"]

         instance_ids = self._on_kill_instance_ids = [instance["InstanceId"] for instance in instances]
+        # Console link is for EC2 dashboard list, not individual instances when more than 1 instance
+
+        EC2InstanceDashboardLink.persist(
+            context=context,
+            operator=self,
+            region_name=ec2_hook.conn_region_name,
+            aws_partition=ec2_hook.conn_partition,
+            instance_ids=EC2InstanceDashboardLink.format_instance_id_filter(instance_ids),
+        )
         for instance_id in instance_ids:
             self.log.info("Created EC2 instance %s", instance_id)

@@ -311,6 +342,7 @@ class EC2RebootInstanceOperator(BaseOperator):
         in the `running` state before returning.
     """

+    operator_extra_links = (EC2InstanceDashboardLink(),)
     template_fields: Sequence[str] = ("instance_ids", "region_name")
     ui_color = "#eeaa11"
     ui_fgcolor = "#ffffff"
@@ -341,6 +373,14 @@ class EC2RebootInstanceOperator(BaseOperator):
         self.log.info("Rebooting EC2 instances %s", ", ".join(self.instance_ids))
         ec2_hook.conn.reboot_instances(InstanceIds=self.instance_ids)

+        # Console link is for EC2 dashboard list, not individual instances
+        EC2InstanceDashboardLink.persist(
+            context=context,
+            operator=self,
+            region_name=ec2_hook.conn_region_name,
+            aws_partition=ec2_hook.conn_partition,
+            instance_ids=EC2InstanceDashboardLink.format_instance_id_filter(self.instance_ids),
+        )
         if self.wait_for_completion:
             ec2_hook.get_waiter("instance_running").wait(
                 InstanceIds=self.instance_ids,
@@ -374,6 +414,7 @@ class EC2HibernateInstanceOperator(BaseOperator):
         in the `stopped` state before returning.
     """

+    operator_extra_links = (EC2InstanceDashboardLink(),)
     template_fields: Sequence[str] = ("instance_ids", "region_name")
     ui_color = "#eeaa11"
     ui_fgcolor = "#ffffff"
@@ -404,6 +445,15 @@ class EC2HibernateInstanceOperator(BaseOperator):
         self.log.info("Hibernating EC2 instances %s", ", ".join(self.instance_ids))
         instances = ec2_hook.get_instances(instance_ids=self.instance_ids)

+        # Console link is for EC2 dashboard list, not individual instances
+        EC2InstanceDashboardLink.persist(
+            context=context,
+            operator=self,
+            region_name=ec2_hook.conn_region_name,
+            aws_partition=ec2_hook.conn_partition,
+            instance_ids=EC2InstanceDashboardLink.format_instance_id_filter(self.instance_ids),
+        )
+
         for instance in instances:
             hibernation_options = instance.get("HibernationOptions")
             if not hibernation_options or not hibernation_options["Configured"]:
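Several of the EC2 hunks pass the instance list through EC2InstanceDashboardLink.format_instance_id_filter before persisting, because the dashboard link targets a filtered instance list rather than a single instance. A hedged sketch of the shape such a link class could take, again on top of the provider's BaseAwsLink; the console URL and the comma separator are assumptions, not the released implementation:

# Hypothetical sketch of the dashboard link used above (airflow/providers/amazon/aws/links/ec2.py).
# BaseAwsLink and BASE_AWS_CONSOLE_LINK are the provider's existing helpers; the console URL
# and the comma separator are illustrative assumptions.
from airflow.providers.amazon.aws.links.base_aws import BASE_AWS_CONSOLE_LINK, BaseAwsLink


class EC2InstanceDashboardLink(BaseAwsLink):
    """Filtered EC2 instance list in the AWS console, used when several instances are involved."""

    name = "Instance Dashboard"
    key = "ec2_instance_dashboard"
    format_str = BASE_AWS_CONSOLE_LINK + "/ec2/home?region={region_name}#Instances:instanceId=:{instance_ids}"

    @staticmethod
    def format_instance_id_filter(instance_ids: list[str]) -> str:
        # The console filter takes a comma-separated list of instance ids.
        return ",".join(instance_ids)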
airflow/providers/amazon/aws/operators/ecs.py
@@ -375,6 +375,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
     :param awslogs_fetch_interval: the interval that the ECS task log fetcher should wait
         in between each Cloudwatch logs fetches.
         If deferrable is set to True, that parameter is ignored and waiter_delay is used instead.
+    :param container_name: The name of the container to fetch logs from. If not set, the first container is used.
     :param quota_retry: Config if and how to retry the launch of a new ECS task, to handle
         transient errors.
     :param reattach: If set to True, will check if the task previously launched by the task_instance
@@ -414,6 +415,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
         "awslogs_region",
         "awslogs_stream_prefix",
         "awslogs_fetch_interval",
+        "container_name",
         "propagate_tags",
         "reattach",
         "number_logs_exception",
@@ -445,6 +447,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
         awslogs_region: str | None = None,
         awslogs_stream_prefix: str | None = None,
         awslogs_fetch_interval: timedelta = timedelta(seconds=30),
+        container_name: str | None = None,
         propagate_tags: str | None = None,
         quota_retry: dict | None = None,
         reattach: bool = False,
@@ -484,7 +487,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
             self.awslogs_region = self.region_name

         self.arn: str | None = None
-        self.container_name: str | None = None
+        self.container_name: str | None = container_name
         self._started_by: str | None = None

         self.retry_args = quota_retry
@@ -568,12 +571,12 @@ class EcsRunTaskOperator(EcsBaseOperator):
         return None

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str | None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error in task execution: {event}")
-        self.arn = event["task_arn"]  # restore arn to its updated value, needed for next steps
-        self.cluster = event["cluster"]
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error in task execution: {validated_event}")
+        self.arn = validated_event["task_arn"]  # restore arn to its updated value, needed for next steps
+        self.cluster = validated_event["cluster"]
         self._after_execution()
         if self._aws_logs_enabled():
             # same behavior as non-deferrable mode, return last line of logs of the task.
@@ -628,7 +631,8 @@ class EcsRunTaskOperator(EcsBaseOperator):
         self.log.info("ECS Task started: %s", response)

         self.arn = response["tasks"][0]["taskArn"]
-        self.container_name = response["tasks"][0]["containers"][0]["name"]
+        if not self.container_name:
+            self.container_name = response["tasks"][0]["containers"][0]["name"]
         self.log.info("ECS task ID is: %s", self._get_ecs_task_id(self.arn))

     def _try_reattach_task(self, started_by: str):
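The practical effect of the new container_name parameter: for multi-container task definitions it selects which container's CloudWatch logs the operator fetches, instead of always taking the first container returned by run_task. A usage sketch with placeholder names (cluster, task definition, and log settings are assumptions):

# Usage sketch with placeholder names: container_name (new in 9.3.0) selects which
# container's CloudWatch log stream is tailed when the task definition has several containers.
from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator

run_app = EcsRunTaskOperator(
    task_id="run_app_container",
    cluster="my-ecs-cluster",                 # placeholder
    task_definition="my-task-def:3",          # placeholder
    launch_type="FARGATE",
    overrides={"containerOverrides": []},
    awslogs_group="/ecs/my-task-def",         # placeholder log group
    awslogs_stream_prefix="ecs/app",          # stream prefix from the task definition
    container_name="app",                     # fetch logs from the "app" container
)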
airflow/providers/amazon/aws/operators/eks.py
@@ -415,11 +415,11 @@ class EksCreateClusterOperator(BaseOperator):
             raise AirflowException("Error creating cluster")

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

         resource = "fargate profile" if self.compute == "fargate" else self.compute
-        if event["status"] != "success":
-            raise AirflowException(f"Error creating {resource}: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error creating {resource}: {validated_event}")

         self.log.info("%s created successfully", resource)

@@ -543,10 +543,10 @@ class EksCreateNodegroupOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error creating nodegroup: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error creating nodegroup: {validated_event}")


 class EksCreateFargateProfileOperator(BaseOperator):
@@ -655,10 +655,10 @@ class EksCreateFargateProfileOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error creating Fargate profile: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error creating Fargate profile: {validated_event}")

         self.log.info("Fargate profile created successfully")

@@ -789,9 +789,9 @@ class EksDeleteClusterOperator(BaseOperator):
             self.log.info(SUCCESS_MSG.format(compute=FARGATE_FULL_NAME))

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] == "success":
+        if validated_event["status"] == "success":
             self.log.info("Cluster deleted successfully.")


@@ -880,10 +880,10 @@ class EksDeleteNodegroupOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error deleting nodegroup: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error deleting nodegroup: {validated_event}")


 class EksDeleteFargateProfileOperator(BaseOperator):
@@ -974,10 +974,10 @@ class EksDeleteFargateProfileOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error deleting Fargate profile: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error deleting Fargate profile: {validated_event}")

         self.log.info("Fargate profile deleted successfully")

airflow/providers/amazon/aws/operators/emr.py
@@ -199,13 +199,13 @@ class EmrAddStepsOperator(BaseOperator):
         return step_ids

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error while running steps: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error while running steps: {validated_event}")

         self.log.info("Steps completed successfully")
-        return event["value"]
+        return validated_event["value"]


 class EmrStartNotebookExecutionOperator(BaseOperator):
@@ -591,12 +591,12 @@ class EmrContainerOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error while running job: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error while running job: {validated_event}")

-        return event["job_id"]
+        return validated_event["job_id"]

     def on_kill(self) -> None:
         """Cancel the submitted job run."""
@@ -769,13 +769,13 @@ class EmrCreateJobFlowOperator(BaseOperator):
         return self._job_flow_id

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error creating jobFlow: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error creating jobFlow: {validated_event}")

         self.log.info("JobFlow created successfully")
-        return event["job_flow_id"]
+        return validated_event["job_flow_id"]

     def on_kill(self) -> None:
         """Terminate the EMR cluster (job flow) unless TerminationProtected is enabled on the cluster."""
@@ -944,10 +944,10 @@ class EmrTerminateJobFlowOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error terminating JobFlow: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error terminating JobFlow: {validated_event}")

         self.log.info("Jobflow terminated successfully.")

@@ -1086,13 +1086,13 @@ class EmrServerlessCreateApplicationOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Trigger error: Application failed to start, event is {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Trigger error: Application failed to start, event is {validated_event}")

-        self.log.info("Application %s started", event["application_id"])
-        return event["application_id"]
+        self.log.info("Application %s started", validated_event["application_id"])
+        return validated_event["application_id"]


 class EmrServerlessStartJobOperator(BaseOperator):
@@ -1273,11 +1273,11 @@ class EmrServerlessStartJobOperator(BaseOperator):
         return self.job_id

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] == "success":
+        if validated_event["status"] == "success":
             self.log.info("Serverless job completed")
-            return event["job_id"]
+            return validated_event["job_id"]

     def on_kill(self) -> None:
         """
@@ -1547,9 +1547,9 @@ class EmrServerlessStopApplicationOperator(BaseOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] == "success":
+        if validated_event["status"] == "success":
             self.log.info("EMR serverless application %s stopped successfully", self.application_id)


@@ -1651,7 +1651,7 @@ class EmrServerlessDeleteApplicationOperator(EmrServerlessStopApplicationOperator):
         self.log.info("EMR serverless application deleted")

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] == "success":
+        if validated_event["status"] == "success":
             self.log.info("EMR serverless application %s deleted successfully", self.application_id)