apache-airflow-providers-amazon 9.19.0rc1__py3-none-any.whl → 9.21.0__py3-none-any.whl

This diff compares publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (75)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -2
  3. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +6 -25
  4. airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +1 -2
  5. airflow/providers/amazon/aws/auth_manager/routes/login.py +3 -4
  6. airflow/providers/amazon/aws/auth_manager/user.py +1 -1
  7. airflow/providers/amazon/aws/cli/__init__.py +16 -0
  8. airflow/providers/amazon/aws/{auth_manager/cli → cli}/definition.py +33 -0
  9. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +2 -9
  10. airflow/providers/amazon/aws/executors/aws_lambda/utils.py +1 -1
  11. airflow/providers/amazon/aws/executors/batch/batch_executor.py +28 -32
  12. airflow/providers/amazon/aws/executors/batch/batch_executor_config.py +5 -6
  13. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +4 -17
  14. airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py +5 -1
  15. airflow/providers/amazon/aws/hooks/athena_sql.py +12 -2
  16. airflow/providers/amazon/aws/hooks/base_aws.py +1 -2
  17. airflow/providers/amazon/aws/hooks/redshift_sql.py +17 -3
  18. airflow/providers/amazon/aws/hooks/s3.py +16 -13
  19. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -6
  20. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +4 -5
  21. airflow/providers/amazon/aws/hooks/ssm.py +22 -0
  22. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  23. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +1 -1
  24. airflow/providers/amazon/aws/log/s3_task_handler.py +1 -1
  25. airflow/providers/amazon/aws/operators/athena.py +1 -2
  26. airflow/providers/amazon/aws/operators/batch.py +1 -2
  27. airflow/providers/amazon/aws/operators/bedrock.py +1 -2
  28. airflow/providers/amazon/aws/operators/comprehend.py +1 -2
  29. airflow/providers/amazon/aws/operators/dms.py +1 -2
  30. airflow/providers/amazon/aws/operators/ec2.py +39 -24
  31. airflow/providers/amazon/aws/operators/ecs.py +1 -2
  32. airflow/providers/amazon/aws/operators/eks.py +1 -2
  33. airflow/providers/amazon/aws/operators/emr.py +34 -13
  34. airflow/providers/amazon/aws/operators/glue.py +1 -2
  35. airflow/providers/amazon/aws/operators/glue_crawler.py +1 -2
  36. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -2
  37. airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -2
  38. airflow/providers/amazon/aws/operators/lambda_function.py +1 -2
  39. airflow/providers/amazon/aws/operators/mwaa.py +1 -2
  40. airflow/providers/amazon/aws/operators/neptune.py +1 -2
  41. airflow/providers/amazon/aws/operators/rds.py +1 -2
  42. airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -2
  43. airflow/providers/amazon/aws/operators/redshift_data.py +1 -2
  44. airflow/providers/amazon/aws/operators/sagemaker.py +1 -2
  45. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +3 -5
  46. airflow/providers/amazon/aws/operators/ssm.py +54 -17
  47. airflow/providers/amazon/aws/operators/step_function.py +1 -2
  48. airflow/providers/amazon/aws/queues/sqs.py +1 -1
  49. airflow/providers/amazon/aws/secrets/secrets_manager.py +2 -1
  50. airflow/providers/amazon/aws/secrets/systems_manager.py +2 -1
  51. airflow/providers/amazon/aws/sensors/batch.py +1 -2
  52. airflow/providers/amazon/aws/sensors/bedrock.py +1 -2
  53. airflow/providers/amazon/aws/sensors/comprehend.py +1 -2
  54. airflow/providers/amazon/aws/sensors/ec2.py +1 -2
  55. airflow/providers/amazon/aws/sensors/emr.py +1 -2
  56. airflow/providers/amazon/aws/sensors/glue.py +1 -2
  57. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -2
  58. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -2
  59. airflow/providers/amazon/aws/sensors/mwaa.py +1 -2
  60. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -2
  61. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -2
  62. airflow/providers/amazon/aws/sensors/s3.py +1 -2
  63. airflow/providers/amazon/aws/sensors/sqs.py +1 -2
  64. airflow/providers/amazon/aws/sensors/ssm.py +22 -2
  65. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +1 -1
  66. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  67. airflow/providers/amazon/aws/triggers/emr.py +2 -2
  68. airflow/providers/amazon/aws/triggers/ssm.py +68 -11
  69. airflow/providers/amazon/get_provider_info.py +2 -1
  70. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/METADATA +17 -14
  71. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/RECORD +75 -74
  72. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/licenses/NOTICE +1 -1
  73. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/WHEEL +0 -0
  74. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/entry_points.txt +0 -0
  75. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/licenses/LICENSE +0 -0
airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -35,12 +35,7 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.tags import format_tags
-from airflow.providers.common.compat.sdk import AirflowException
-
-try:
-    from airflow.sdk import timezone
-except ImportError:
-    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
+from airflow.providers.common.compat.sdk import AirflowException, timezone


 class LogState:
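
Many of the hunks below make the same mechanical change: per-module import fallbacks are replaced by a single import from airflow.providers.common.compat.sdk. A minimal sketch of the pattern being consolidated, assuming Airflow 2.x as the fallback target:

```python
# Pre-change pattern, repeated in each module: try the Airflow 3 Task SDK
# location first, then fall back to the legacy location on Airflow 2.
try:
    from airflow.sdk import timezone
except ImportError:
    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]

# Within the 9.19 -> 9.21 range, the compat provider centralizes that fallback
# so each module needs only one import:
from airflow.providers.common.compat.sdk import timezone
```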
airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py
@@ -52,13 +52,12 @@ class SageMakerNotebookHook(BaseHook):
         Example: {'input_path': 'folder/input/notebook.ipynb', 'input_params': {'param1': 'value1'}}
     :param output_config: Configuration for the output format. It should include an output_formats parameter to specify the output format.
         Example: {'output_formats': ['NOTEBOOK']}
-    :param compute: compute configuration to use for the notebook execution. This is a required attribute
-        if the execution is on a remote compute.
-        Example: { "instance_type": "ml.m5.large", "volume_size_in_gb": 30, "volume_kms_key_id": "", "image_uri": "string", "container_entrypoint": [ "string" ]}
+    :param compute: compute configuration to use for the notebook execution. This is a required attribute if the execution is on a remote compute.
+        Example: {"instance_type": "ml.m5.large", "volume_size_in_gb": 30, "volume_kms_key_id": "", "image_details": {"ecr_uri": "string"}, "container_entrypoint": ["string"]}
     :param termination_condition: conditions to match to terminate the remote execution.
-        Example: { "MaxRuntimeInSeconds": 3600 }
+        Example: {"MaxRuntimeInSeconds": 3600}
     :param tags: tags to be associated with the remote execution runs.
-        Example: { "md_analytics": "logs" }
+        Example: {"md_analytics": "logs"}
     :param waiter_delay: Interval in seconds to check the task execution status.
     :param waiter_max_attempts: Number of attempts to wait before returning FAILED.
     """
airflow/providers/amazon/aws/hooks/ssm.py
@@ -99,3 +99,25 @@ class SsmHook(AwsBaseHook):
         :return: Response from SSM list_command_invocations API.
         """
         return self.conn.list_command_invocations(CommandId=command_id)
+
+    @staticmethod
+    def is_aws_level_failure(status: str) -> bool:
+        """
+        Check if a command status represents an AWS-level failure.
+
+        AWS-level failures are service-level issues that should always raise exceptions,
+        as opposed to command-level failures (non-zero exit codes) which may be tolerated
+        depending on the fail_on_nonzero_exit parameter.
+
+        According to AWS SSM documentation, the possible statuses are:
+        Pending, InProgress, Delayed, Success, Cancelled, TimedOut, Failed, Cancelling
+
+        AWS-level failures are:
+        - Cancelled: Command was cancelled before completion
+        - TimedOut: Command exceeded the timeout period
+        - Cancelling: Command is in the process of being cancelled
+
+        :param status: The command invocation status from SSM.
+        :return: True if the status represents an AWS-level failure, False otherwise.
+        """
+        return status in ("Cancelled", "TimedOut", "Cancelling")
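
Since is_aws_level_failure is a staticmethod classifying a plain string, it can be exercised without AWS credentials. Note that Failed (a non-zero exit code) is deliberately not in the list, since it is gated by fail_on_nonzero_exit:

```python
from airflow.providers.amazon.aws.hooks.ssm import SsmHook

assert SsmHook.is_aws_level_failure("TimedOut")    # service-level: always fatal
assert SsmHook.is_aws_level_failure("Cancelled")   # service-level: always fatal
assert not SsmHook.is_aws_level_failure("Failed")  # command-level: may be tolerated
assert not SsmHook.is_aws_level_failure("Success")
```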
airflow/providers/amazon/aws/links/base_aws.py
@@ -24,7 +24,7 @@ from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom

 if TYPE_CHECKING:
     from airflow.models import BaseOperator
-    from airflow.models.taskinstancekey import TaskInstanceKey
+    from airflow.providers.common.compat.sdk import TaskInstanceKey
     from airflow.sdk import Context

airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -31,9 +31,9 @@ from typing import TYPE_CHECKING, Any
 import attrs
 import watchtower

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.providers.amazon.aws.utils import datetime_to_epoch_utc_ms
+from airflow.providers.common.compat.sdk import conf
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import LoggingMixin

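
Both log handlers make the matching swap for conf. A hedged sketch of what a consumer sees after the change (the [logging] delete_local_logs key shown here is the standard option task handlers read, used for illustration):

```python
# Same call sites, new import origin: `conf` now resolves through the
# compat provider rather than airflow.configuration directly.
from airflow.providers.common.compat.sdk import conf

delete_local_copy = conf.getboolean("logging", "delete_local_logs")
```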
airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -26,9 +26,9 @@ from typing import TYPE_CHECKING

 import attrs

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.providers.common.compat.sdk import conf
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import LoggingMixin

airflow/providers/amazon/aws/operators/athena.py
@@ -21,14 +21,13 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 from urllib.parse import urlparse

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.athena import AthenaHook
 from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.athena import AthenaTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.providers.common.compat.openlineage.facet import BaseFacet, Dataset, DatasetFacet
airflow/providers/amazon/aws/operators/batch.py
@@ -30,7 +30,6 @@ from collections.abc import Sequence
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.aws.links.batch import (
     BatchJobDefinitionLink,
@@ -46,7 +45,7 @@ from airflow.providers.amazon.aws.triggers.batch import (
 from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/bedrock.py
@@ -23,7 +23,6 @@ from typing import TYPE_CHECKING, Any

 from botocore.exceptions import ClientError

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.bedrock import (
     BedrockAgentHook,
     BedrockAgentRuntimeHook,
@@ -40,7 +39,7 @@ from airflow.providers.amazon.aws.triggers.bedrock import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException, timezone
+from airflow.providers.common.compat.sdk import AirflowException, conf, timezone
 from airflow.utils.helpers import prune_dict

 if TYPE_CHECKING:
airflow/providers/amazon/aws/operators/comprehend.py
@@ -20,7 +20,6 @@ from collections.abc import Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, ClassVar

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
 from airflow.providers.amazon.aws.links.comprehend import (
     ComprehendDocumentClassifierLink,
@@ -33,7 +32,7 @@ from airflow.providers.amazon.aws.triggers.comprehend import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException, timezone
+from airflow.providers.common.compat.sdk import AirflowException, conf, timezone

 if TYPE_CHECKING:
     import boto3
airflow/providers/amazon/aws/operators/dms.py
@@ -21,7 +21,6 @@ from collections.abc import Sequence
 from datetime import datetime
 from typing import Any, ClassVar

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.dms import (
@@ -32,7 +31,7 @@ from airflow.providers.amazon.aws.triggers.dms import (
     DmsReplicationTerminalStatusTrigger,
 )
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException, Context
+from airflow.providers.common.compat.sdk import AirflowException, Context, conf


 class DmsCreateTaskOperator(AwsBaseOperator[DmsHook]):
airflow/providers/amazon/aws/operators/ec2.py
@@ -218,31 +218,46 @@ class EC2CreateInstanceOperator(AwsBaseOperator[EC2Hook]):
             MaxCount=self.max_count,
             **self.config,
         )["Instances"]
-
-        instance_ids = self._on_kill_instance_ids = [instance["InstanceId"] for instance in instances]
-        # Console link is for EC2 dashboard list, not individual instances when more than 1 instance
-
-        EC2InstanceDashboardLink.persist(
-            context=context,
-            operator=self,
-            region_name=self.hook.conn_region_name,
-            aws_partition=self.hook.conn_partition,
-            instance_ids=EC2InstanceDashboardLink.format_instance_id_filter(instance_ids),
-        )
-        for instance_id in instance_ids:
-            self.log.info("Created EC2 instance %s", instance_id)
-
-            if self.wait_for_completion:
-                self.hook.get_waiter("instance_running").wait(
-                    InstanceIds=[instance_id],
-                    WaiterConfig={
-                        "Delay": self.poll_interval,
-                        "MaxAttempts": self.max_attempts,
-                    },
+        try:
+            instance_ids = self._on_kill_instance_ids = [instance["InstanceId"] for instance in instances]
+            # Console link is for EC2 dashboard list, not individual instances when more than 1 instance
+
+            EC2InstanceDashboardLink.persist(
+                context=context,
+                operator=self,
+                region_name=self.hook.conn_region_name,
+                aws_partition=self.hook.conn_partition,
+                instance_ids=EC2InstanceDashboardLink.format_instance_id_filter(instance_ids),
+            )
+            for instance_id in instance_ids:
+                self.log.info("Created EC2 instance %s", instance_id)
+
+                if self.wait_for_completion:
+                    self.hook.get_waiter("instance_running").wait(
+                        InstanceIds=[instance_id],
+                        WaiterConfig={
+                            "Delay": self.poll_interval,
+                            "MaxAttempts": self.max_attempts,
+                        },
+                    )
+
+            # leave "_on_kill_instance_ids" in place for finishing post-processing
+            return instance_ids
+
+        # Best-effort cleanup when post-creation steps fail (e.g. IAM/permission errors).
+        except Exception:
+            self.log.exception(
+                "Exception after EC2 instance creation; attempting cleanup for instances %s",
+                instance_ids,
+            )
+            try:
+                self.hook.terminate_instances(instance_ids=instance_ids)
+            except Exception:
+                self.log.exception(
+                    "Failed to cleanup EC2 instances %s after task failure",
+                    instance_ids,
                )
-
-        # leave "_on_kill_instance_ids" in place for finishing post-processing
-        return instance_ids
+            raise

     def on_kill(self) -> None:
         instance_ids = getattr(self, "_on_kill_instance_ids", [])
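
The net effect: a failure in the dashboard-link persistence or the instance_running waiter no longer leaks freshly created instances; they are terminated best-effort before the original exception is re-raised. A hedged usage sketch (the AMI ID and counts are placeholders):

```python
from airflow.providers.amazon.aws.operators.ec2 import EC2CreateInstanceOperator

create_instance = EC2CreateInstanceOperator(
    task_id="create_instance",
    image_id="ami-0123456789abcdef0",  # placeholder AMI
    min_count=1,
    max_count=1,
    wait_for_completion=True,  # waiter errors now trigger best-effort termination
    poll_interval=20,
    max_attempts=20,
)
```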
airflow/providers/amazon/aws/operators/ecs.py
@@ -24,7 +24,6 @@ from functools import cached_property
 from time import sleep
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.exceptions import EcsOperatorError, EcsTaskFailToStart
 from airflow.providers.amazon.aws.hooks.ecs import EcsClusterStates, EcsHook
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
@@ -38,7 +37,7 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf
 from airflow.utils.helpers import prune_dict

 if TYPE_CHECKING:
airflow/providers/amazon/aws/operators/eks.py
@@ -27,7 +27,6 @@ from typing import TYPE_CHECKING, Any, cast

 from botocore.exceptions import ClientError, WaiterError

-from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.eks import EksHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
@@ -43,7 +42,7 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
 from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 try:
     from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
airflow/providers/amazon/aws/operators/emr.py
@@ -24,7 +24,6 @@ from datetime import timedelta
 from typing import TYPE_CHECKING, Any
 from uuid import uuid4

-from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
 from airflow.providers.amazon.aws.links.emr import (
@@ -58,7 +57,7 @@ from airflow.providers.amazon.aws.utils.waiter import (
 )
 from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
 from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf
 from airflow.utils.helpers import exactly_one, prune_dict

 if TYPE_CHECKING:
@@ -1275,16 +1274,32 @@ class EmrServerlessStartJobOperator(AwsBaseOperator[EmrServerlessHook]):
                 timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay),
             )
         else:
-            waiter = self.hook.get_waiter("serverless_job_completed")
-            wait(
-                waiter=waiter,
-                waiter_max_attempts=self.waiter_max_attempts,
-                waiter_delay=self.waiter_delay,
-                args={"applicationId": self.application_id, "jobRunId": self.job_id},
-                failure_message="Serverless Job failed",
-                status_message="Serverless Job status is",
-                status_args=["jobRun.state", "jobRun.stateDetails"],
-            )
+            try:
+                waiter = self.hook.get_waiter("serverless_job_completed")
+                wait(
+                    waiter=waiter,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    waiter_delay=self.waiter_delay,
+                    args={"applicationId": self.application_id, "jobRunId": self.job_id},
+                    failure_message="Serverless Job failed",
+                    status_message="Serverless Job status is",
+                    status_args=["jobRun.state", "jobRun.stateDetails"],
+                )
+            except AirflowException as e:
+                if "Waiter error: max attempts reached" in str(e):
+                    self.log.info(
+                        "Cancelling EMR Serverless job %s due to max waiter attempts reached", self.job_id
+                    )
+                    try:
+                        self.hook.conn.cancel_job_run(
+                            applicationId=self.application_id, jobRunId=self.job_id
+                        )
+                    except Exception:
+                        self.log.exception(
+                            "Failed to cancel EMR Serverless job %s after waiter timeout",
+                            self.job_id,
+                        )
+                raise

         return self.job_id

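
In the synchronous path this changes timeout semantics: exhausting the completion waiter now cancels the job run (best effort) before re-raising, instead of leaving it running. A hedged sketch of an operator configured so the waiter bounds total runtime (IDs, ARN, and S3 path are placeholders):

```python
from airflow.providers.amazon.aws.operators.emr import EmrServerlessStartJobOperator

start_job = EmrServerlessStartJobOperator(
    task_id="start_emr_serverless_job",
    application_id="00abcdef12345678",  # placeholder application ID
    execution_role_arn="arn:aws:iam::123456789012:role/emr-serverless-role",
    job_driver={"sparkSubmit": {"entryPoint": "s3://my-bucket/scripts/job.py"}},
    waiter_delay=30,
    waiter_max_attempts=120,  # ~1 hour; on exhaustion the run is now cancelled
)
```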
@@ -1293,7 +1308,13 @@ class EmrServerlessStartJobOperator(AwsBaseOperator[EmrServerlessHook]):

         if validated_event["status"] == "success":
             self.log.info("Serverless job completed")
-            return validated_event["job_id"]
+            return validated_event["job_details"]["job_id"]
+        self.log.info("Cancelling EMR Serverless job %s", self.job_id)
+        self.hook.conn.cancel_job_run(
+            applicationId=validated_event["job_details"]["application_id"],
+            jobRunId=validated_event["job_details"]["job_id"],
+        )
+        raise AirflowException("EMR Serverless job failed or timed out in deferrable mode")

     def on_kill(self) -> None:
         """
airflow/providers/amazon/aws/operators/glue.py
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Any

 from botocore.exceptions import ClientError

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.glue import GlueDataQualityHook, GlueJobHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.links.glue import GlueJobRunDetailsLink
@@ -36,7 +35,7 @@ from airflow.providers.amazon.aws.triggers.glue import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/glue_crawler.py
@@ -20,13 +20,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.glue_crawler import GlueCrawlerCompleteTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/glue_databrew.py
@@ -20,13 +20,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.glue_databrew import GlueDataBrewJobCompleteTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/kinesis_analytics.py
@@ -21,7 +21,6 @@ from typing import TYPE_CHECKING, Any, ClassVar

 from botocore.exceptions import ClientError

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.kinesis_analytics import KinesisAnalyticsV2Hook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.kinesis_analytics import (
@@ -29,7 +28,7 @@ from airflow.providers.amazon.aws.triggers.kinesis_analytics import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/lambda_function.py
@@ -22,13 +22,12 @@ from collections.abc import Sequence
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.lambda_function import LambdaCreateFunctionCompleteTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/mwaa.py
@@ -21,13 +21,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, Literal

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.mwaa import MwaaDagRunCompletedTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/neptune.py
@@ -22,7 +22,6 @@ from typing import TYPE_CHECKING, Any

 from botocore.exceptions import ClientError

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.neptune import NeptuneHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.neptune import (
@@ -31,7 +30,7 @@ from airflow.providers.amazon.aws.triggers.neptune import (
     NeptuneClusterStoppedTrigger,
 )
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
airflow/providers/amazon/aws/operators/rds.py
@@ -22,7 +22,6 @@ from collections.abc import Sequence
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.rds import RdsHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.rds import (
@@ -35,7 +34,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.rds import RdsDbType
 from airflow.providers.amazon.aws.utils.tags import format_tags
 from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf
 from airflow.utils.helpers import prune_dict

 if TYPE_CHECKING:
airflow/providers/amazon/aws/operators/redshift_cluster.py
@@ -21,7 +21,6 @@ from collections.abc import Sequence
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.redshift_cluster import (
@@ -33,7 +32,7 @@ from airflow.providers.amazon.aws.triggers.redshift_cluster import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf
 from airflow.utils.helpers import prune_dict

 if TYPE_CHECKING:
airflow/providers/amazon/aws/operators/redshift_data.py
@@ -19,13 +19,12 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, Any

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.redshift_data import RedshiftDataTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf

 if TYPE_CHECKING:
     from mypy_boto3_redshift_data.type_defs import (
airflow/providers/amazon/aws/operators/sagemaker.py
@@ -25,7 +25,6 @@ from typing import TYPE_CHECKING, Any, ClassVar

 from botocore.exceptions import ClientError

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.sagemaker import (
     LogState,
@@ -42,7 +41,7 @@ from airflow.providers.amazon.aws.utils import trim_none_values, validate_execut
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.sagemaker import ApprovalStatus
 from airflow.providers.amazon.aws.utils.tags import format_tags
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf
 from airflow.utils.helpers import prune_dict

 if TYPE_CHECKING:
airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py
@@ -22,7 +22,6 @@ from __future__ import annotations
 from functools import cached_property
 from typing import TYPE_CHECKING

-from airflow.configuration import conf
 from airflow.providers.amazon.aws.hooks.sagemaker_unified_studio import (
     SageMakerNotebookHook,
 )
@@ -32,7 +31,7 @@ from airflow.providers.amazon.aws.links.sagemaker_unified_studio import (
 from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import (
     SageMakerNotebookJobTrigger,
 )
-from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, conf

 if TYPE_CHECKING:
     from airflow.sdk import Context
@@ -64,9 +63,8 @@ class SageMakerNotebookOperator(BaseOperator):
     :param output_config: Configuration for the output format. It should include an output_format parameter to control
         the format of the notebook execution output.
         Example: {"output_formats": ["NOTEBOOK"]}
-    :param compute: compute configuration to use for the artifact execution. This is a required attribute
-        if the execution is on a remote compute.
-        Example: { "InstanceType": "ml.m5.large", "VolumeSizeInGB": 30, "VolumeKmsKeyId": "", "ImageUri": "string", "ContainerEntrypoint": [ "string" ]}
+    :param compute: compute configuration to use for the notebook execution. This is a required attribute if the execution is on a remote compute.
+        Example: {"instance_type": "ml.m5.large", "volume_size_in_gb": 30, "volume_kms_key_id": "", "image_details": {"ecr_uri": "string"}, "container_entrypoint": ["string"]}
     :param termination_condition: conditions to match to terminate the remote execution.
         Example: { "MaxRuntimeInSeconds": 3600 }
     :param tags: tags to be associated with the remote execution runs.