apache-airflow-providers-amazon 9.19.0rc1__py3-none-any.whl → 9.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -2
  3. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +6 -25
  4. airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +1 -2
  5. airflow/providers/amazon/aws/auth_manager/routes/login.py +3 -4
  6. airflow/providers/amazon/aws/auth_manager/user.py +1 -1
  7. airflow/providers/amazon/aws/cli/__init__.py +16 -0
  8. airflow/providers/amazon/aws/{auth_manager/cli → cli}/definition.py +33 -0
  9. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +2 -9
  10. airflow/providers/amazon/aws/executors/aws_lambda/utils.py +1 -1
  11. airflow/providers/amazon/aws/executors/batch/batch_executor.py +28 -32
  12. airflow/providers/amazon/aws/executors/batch/batch_executor_config.py +5 -6
  13. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +4 -17
  14. airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py +5 -1
  15. airflow/providers/amazon/aws/hooks/athena_sql.py +12 -2
  16. airflow/providers/amazon/aws/hooks/base_aws.py +1 -2
  17. airflow/providers/amazon/aws/hooks/redshift_sql.py +17 -3
  18. airflow/providers/amazon/aws/hooks/s3.py +16 -13
  19. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -6
  20. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +4 -5
  21. airflow/providers/amazon/aws/hooks/ssm.py +22 -0
  22. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  23. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +1 -1
  24. airflow/providers/amazon/aws/log/s3_task_handler.py +1 -1
  25. airflow/providers/amazon/aws/operators/athena.py +1 -2
  26. airflow/providers/amazon/aws/operators/batch.py +1 -2
  27. airflow/providers/amazon/aws/operators/bedrock.py +1 -2
  28. airflow/providers/amazon/aws/operators/comprehend.py +1 -2
  29. airflow/providers/amazon/aws/operators/dms.py +1 -2
  30. airflow/providers/amazon/aws/operators/ec2.py +39 -24
  31. airflow/providers/amazon/aws/operators/ecs.py +1 -2
  32. airflow/providers/amazon/aws/operators/eks.py +1 -2
  33. airflow/providers/amazon/aws/operators/emr.py +34 -13
  34. airflow/providers/amazon/aws/operators/glue.py +1 -2
  35. airflow/providers/amazon/aws/operators/glue_crawler.py +1 -2
  36. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -2
  37. airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -2
  38. airflow/providers/amazon/aws/operators/lambda_function.py +1 -2
  39. airflow/providers/amazon/aws/operators/mwaa.py +1 -2
  40. airflow/providers/amazon/aws/operators/neptune.py +1 -2
  41. airflow/providers/amazon/aws/operators/rds.py +1 -2
  42. airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -2
  43. airflow/providers/amazon/aws/operators/redshift_data.py +1 -2
  44. airflow/providers/amazon/aws/operators/sagemaker.py +1 -2
  45. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +3 -5
  46. airflow/providers/amazon/aws/operators/ssm.py +54 -17
  47. airflow/providers/amazon/aws/operators/step_function.py +1 -2
  48. airflow/providers/amazon/aws/queues/sqs.py +1 -1
  49. airflow/providers/amazon/aws/secrets/secrets_manager.py +2 -1
  50. airflow/providers/amazon/aws/secrets/systems_manager.py +2 -1
  51. airflow/providers/amazon/aws/sensors/batch.py +1 -2
  52. airflow/providers/amazon/aws/sensors/bedrock.py +1 -2
  53. airflow/providers/amazon/aws/sensors/comprehend.py +1 -2
  54. airflow/providers/amazon/aws/sensors/ec2.py +1 -2
  55. airflow/providers/amazon/aws/sensors/emr.py +1 -2
  56. airflow/providers/amazon/aws/sensors/glue.py +1 -2
  57. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -2
  58. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -2
  59. airflow/providers/amazon/aws/sensors/mwaa.py +1 -2
  60. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -2
  61. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -2
  62. airflow/providers/amazon/aws/sensors/s3.py +1 -2
  63. airflow/providers/amazon/aws/sensors/sqs.py +1 -2
  64. airflow/providers/amazon/aws/sensors/ssm.py +22 -2
  65. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +1 -1
  66. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  67. airflow/providers/amazon/aws/triggers/emr.py +2 -2
  68. airflow/providers/amazon/aws/triggers/ssm.py +68 -11
  69. airflow/providers/amazon/get_provider_info.py +2 -1
  70. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/METADATA +17 -14
  71. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/RECORD +75 -74
  72. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/licenses/NOTICE +1 -1
  73. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/WHEEL +0 -0
  74. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/entry_points.txt +0 -0
  75. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/licenses/LICENSE +0 -0
@@ -19,12 +19,14 @@ from __future__ import annotations
19
19
  from collections.abc import Sequence
20
20
  from typing import TYPE_CHECKING, Any
21
21
 
22
- from airflow.configuration import conf
22
+ from botocore.exceptions import WaiterError
23
+
23
24
  from airflow.providers.amazon.aws.hooks.ssm import SsmHook
24
25
  from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
25
26
  from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
26
27
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
27
28
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
+ from airflow.providers.common.compat.sdk import conf
28
30
 
29
31
  if TYPE_CHECKING:
30
32
  from airflow.sdk import Context
@@ -50,6 +52,12 @@ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
50
52
  (default: 120)
51
53
  :param waiter_max_attempts: Maximum number of attempts to check for job
52
54
  completion. (default: 75)
55
+ :param fail_on_nonzero_exit: If True (default), the operator will fail when
56
+ the command returns a non-zero exit code. If False, the operator will
57
+ complete successfully regardless of the command exit code, allowing
58
+ downstream tasks to handle exit codes for workflow routing. Note that
59
+ AWS-level failures (Cancelled, TimedOut) will still raise exceptions
60
+ even when this is False. (default: True)
53
61
  :param deferrable: If True, the operator will wait asynchronously for the
54
62
  cluster to stop. This implies waiting for completion. This mode
55
63
  requires aiobotocore module to be installed. (default: False)
@@ -81,6 +89,7 @@ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
81
89
  wait_for_completion: bool = True,
82
90
  waiter_delay: int = 120,
83
91
  waiter_max_attempts: int = 75,
92
+ fail_on_nonzero_exit: bool = True,
84
93
  deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
85
94
  **kwargs,
86
95
  ):
@@ -88,6 +97,7 @@ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
88
97
  self.wait_for_completion = wait_for_completion
89
98
  self.waiter_delay = waiter_delay
90
99
  self.waiter_max_attempts = waiter_max_attempts
100
+ self.fail_on_nonzero_exit = fail_on_nonzero_exit
91
101
  self.deferrable = deferrable
92
102
 
93
103
  self.document_name = document_name
@@ -96,6 +106,19 @@ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
96
106
  def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
97
107
  event = validate_execute_complete_event(event)
98
108
 
109
+ if event["status"] == "failed":
110
+ # Command failed - raise an exception with detailed information
111
+ command_status = event.get("command_status", "Unknown")
112
+ exit_code = event.get("exit_code", -1)
113
+ instance_id = event.get("instance_id", "Unknown")
114
+ message = event.get("message", "Command failed")
115
+
116
+ error_msg = (
117
+ f"SSM run command {event['command_id']} failed on instance {instance_id}. "
118
+ f"Status: {command_status}, Exit code: {exit_code}. {message}"
119
+ )
120
+ raise RuntimeError(error_msg)
121
+
99
122
  if event["status"] != "success":
100
123
  raise RuntimeError(f"Error while running run command: {event}")
101
124
 
@@ -118,6 +141,7 @@ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
118
141
  command_id=command_id,
119
142
  waiter_delay=self.waiter_delay,
120
143
  waiter_max_attempts=self.waiter_max_attempts,
144
+ fail_on_nonzero_exit=self.fail_on_nonzero_exit,
121
145
  aws_conn_id=self.aws_conn_id,
122
146
  region_name=self.region_name,
123
147
  verify=self.verify,
@@ -132,14 +156,35 @@ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
132
156
 
133
157
  instance_ids = response["Command"]["InstanceIds"]
134
158
  for instance_id in instance_ids:
135
- waiter.wait(
136
- CommandId=command_id,
137
- InstanceId=instance_id,
138
- WaiterConfig={
139
- "Delay": self.waiter_delay,
140
- "MaxAttempts": self.waiter_max_attempts,
141
- },
142
- )
159
+ try:
160
+ waiter.wait(
161
+ CommandId=command_id,
162
+ InstanceId=instance_id,
163
+ WaiterConfig={
164
+ "Delay": self.waiter_delay,
165
+ "MaxAttempts": self.waiter_max_attempts,
166
+ },
167
+ )
168
+ except WaiterError:
169
+ if not self.fail_on_nonzero_exit:
170
+ # Enhanced mode: distinguish between AWS-level and command-level failures
171
+ invocation = self.hook.get_command_invocation(command_id, instance_id)
172
+ status = invocation.get("Status", "")
173
+
174
+ # AWS-level failures should always raise
175
+ if SsmHook.is_aws_level_failure(status):
176
+ raise
177
+
178
+ # Command-level failure - tolerate it in enhanced mode
179
+ self.log.info(
180
+ "Command completed with status %s (exit code: %s). "
181
+ "Continuing due to fail_on_nonzero_exit=False",
182
+ status,
183
+ invocation.get("ResponseCode", "unknown"),
184
+ )
185
+ else:
186
+ # Traditional mode: all failures raise
187
+ raise
143
188
 
144
189
  return command_id
145
190
 
@@ -148,14 +193,6 @@ class SsmGetCommandInvocationOperator(AwsBaseOperator[SsmHook]):
148
193
  """
149
194
  Retrieves the output and execution details of an SSM command invocation.
150
195
 
151
- This operator allows you to fetch the standard output, standard error,
152
- execution status, and other details from SSM commands. It can be used to
153
- retrieve output from commands executed by SsmRunCommandOperator in previous
154
- tasks, or from commands executed outside of Airflow entirely.
155
-
156
- The operator returns structured data including stdout, stderr, execution
157
- times, and status information for each instance that executed the command.
158
-
159
196
  .. seealso::
160
197
  For more information on how to use this operator, take a look at the
161
198
  guide:
@@ -21,7 +21,6 @@ from collections.abc import Sequence
21
21
  from datetime import timedelta
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
- from airflow.configuration import conf
25
24
  from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
26
25
  from airflow.providers.amazon.aws.links.step_function import (
27
26
  StateMachineDetailsLink,
@@ -31,7 +30,7 @@ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
31
30
  from airflow.providers.amazon.aws.triggers.step_function import StepFunctionsExecutionCompleteTrigger
32
31
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
33
32
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
34
- from airflow.providers.common.compat.sdk import AirflowException
33
+ from airflow.providers.common.compat.sdk import AirflowException, conf
35
34
 
36
35
  if TYPE_CHECKING:
37
36
  from airflow.sdk import Context
@@ -19,8 +19,8 @@ from __future__ import annotations
19
19
  import re
20
20
  from typing import TYPE_CHECKING
21
21
 
22
- from airflow.exceptions import AirflowOptionalProviderFeatureException
23
22
  from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
23
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
24
24
 
25
25
  try:
26
26
  from airflow.providers.common.messaging.providers.base_provider import BaseMessageQueueProvider
@@ -197,11 +197,12 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
197
197
 
198
198
  return conn_d
199
199
 
200
- def get_conn_value(self, conn_id: str) -> str | None:
200
+ def get_conn_value(self, conn_id: str, team_name: str | None = None) -> str | None:
201
201
  """
202
202
  Get serialized representation of Connection.
203
203
 
204
204
  :param conn_id: connection id
205
+ :param team_name: Team name associated to the task trying to access the connection (if any)
205
206
  """
206
207
  if self.connections_prefix is None:
207
208
  return None
@@ -132,11 +132,12 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
132
132
  session = SessionFactory(conn=conn_config).create_session()
133
133
  return session.client(service_name="ssm", **client_kwargs)
134
134
 
135
- def get_conn_value(self, conn_id: str) -> str | None:
135
+ def get_conn_value(self, conn_id: str, team_name: str | None = None) -> str | None:
136
136
  """
137
137
  Get param value.
138
138
 
139
139
  :param conn_id: connection id
140
+ :param team_name: Team name associated to the task trying to access the connection (if any)
140
141
  """
141
142
  if self.connections_prefix is None:
142
143
  return None
@@ -20,12 +20,11 @@ from collections.abc import Sequence
20
20
  from datetime import timedelta
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
27
26
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
- from airflow.providers.common.compat.sdk import AirflowException
27
+ from airflow.providers.common.compat.sdk import AirflowException, conf
29
28
 
30
29
  if TYPE_CHECKING:
31
30
  from airflow.sdk import Context
@@ -21,7 +21,6 @@ import abc
21
21
  from collections.abc import Sequence
22
22
  from typing import TYPE_CHECKING, Any, TypeVar
23
23
 
24
- from airflow.configuration import conf
25
24
  from airflow.providers.amazon.aws.hooks.bedrock import BedrockAgentHook, BedrockHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.bedrock import (
@@ -33,7 +32,7 @@ from airflow.providers.amazon.aws.triggers.bedrock import (
33
32
  BedrockProvisionModelThroughputCompletedTrigger,
34
33
  )
35
34
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
36
- from airflow.providers.common.compat.sdk import AirflowException
35
+ from airflow.providers.common.compat.sdk import AirflowException, conf
37
36
 
38
37
  if TYPE_CHECKING:
39
38
  from airflow.providers.amazon.aws.triggers.bedrock import BedrockBaseBatchInferenceTrigger
@@ -20,7 +20,6 @@ import abc
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.comprehend import (
@@ -28,7 +27,7 @@ from airflow.providers.amazon.aws.triggers.comprehend import (
28
27
  ComprehendPiiEntitiesDetectionJobCompletedTrigger,
29
28
  )
30
29
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
31
- from airflow.providers.common.compat.sdk import AirflowException
30
+ from airflow.providers.common.compat.sdk import AirflowException, conf
32
31
 
33
32
  if TYPE_CHECKING:
34
33
  from airflow.sdk import Context
@@ -20,13 +20,12 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.ec2 import EC2StateSensorTrigger
27
26
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
- from airflow.providers.common.compat.sdk import AirflowException
28
+ from airflow.providers.common.compat.sdk import AirflowException, conf
30
29
 
31
30
  if TYPE_CHECKING:
32
31
  from airflow.sdk import Context
@@ -21,7 +21,6 @@ from collections.abc import Iterable, Sequence
21
21
  from datetime import timedelta
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
- from airflow.configuration import conf
25
24
  from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
26
25
  from airflow.providers.amazon.aws.links.emr import EmrClusterLink, EmrLogsLink, get_log_uri
27
26
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
@@ -32,7 +31,7 @@ from airflow.providers.amazon.aws.triggers.emr import (
32
31
  )
33
32
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
34
33
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
35
- from airflow.providers.common.compat.sdk import AirflowException
34
+ from airflow.providers.common.compat.sdk import AirflowException, conf
36
35
 
37
36
  if TYPE_CHECKING:
38
37
  from airflow.sdk import Context
@@ -20,7 +20,6 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.glue import GlueDataQualityHook, GlueJobHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.glue import (
@@ -30,7 +29,7 @@ from airflow.providers.amazon.aws.triggers.glue import (
30
29
  )
31
30
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
32
31
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
33
- from airflow.providers.common.compat.sdk import AirflowException
32
+ from airflow.providers.common.compat.sdk import AirflowException, conf
34
33
 
35
34
  if TYPE_CHECKING:
36
35
  from airflow.sdk import Context
@@ -21,13 +21,12 @@ from collections.abc import Sequence
21
21
  from datetime import timedelta
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
- from airflow.configuration import conf
25
24
  from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.glue import GlueCatalogPartitionTrigger
28
27
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
29
28
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
- from airflow.providers.common.compat.sdk import AirflowException
29
+ from airflow.providers.common.compat.sdk import AirflowException, conf
31
30
 
32
31
  if TYPE_CHECKING:
33
32
  from airflow.sdk import Context
@@ -19,14 +19,13 @@ from __future__ import annotations
19
19
  from collections.abc import Sequence
20
20
  from typing import TYPE_CHECKING, Any
21
21
 
22
- from airflow.configuration import conf
23
22
  from airflow.providers.amazon.aws.hooks.kinesis_analytics import KinesisAnalyticsV2Hook
24
23
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
25
24
  from airflow.providers.amazon.aws.triggers.kinesis_analytics import (
26
25
  KinesisAnalyticsV2ApplicationOperationCompleteTrigger,
27
26
  )
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
- from airflow.providers.common.compat.sdk import AirflowException
28
+ from airflow.providers.common.compat.sdk import AirflowException, conf
30
29
 
31
30
  if TYPE_CHECKING:
32
31
  from airflow.sdk import Context
@@ -20,12 +20,11 @@ from __future__ import annotations
20
20
  from collections.abc import Collection, Sequence
21
21
  from typing import TYPE_CHECKING, Any, Literal
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.mwaa import MwaaDagRunCompletedTrigger, MwaaTaskCompletedTrigger
27
26
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
- from airflow.providers.common.compat.sdk import AirflowException
27
+ from airflow.providers.common.compat.sdk import AirflowException, conf
29
28
  from airflow.utils.state import DagRunState, TaskInstanceState
30
29
 
31
30
  if TYPE_CHECKING:
@@ -19,14 +19,13 @@ from __future__ import annotations
19
19
  from collections.abc import Sequence
20
20
  from typing import TYPE_CHECKING, Any
21
21
 
22
- from airflow.configuration import conf
23
22
  from airflow.providers.amazon.aws.hooks.opensearch_serverless import OpenSearchServerlessHook
24
23
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
25
24
  from airflow.providers.amazon.aws.triggers.opensearch_serverless import (
26
25
  OpenSearchServerlessCollectionActiveTrigger,
27
26
  )
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
- from airflow.providers.common.compat.sdk import AirflowException
28
+ from airflow.providers.common.compat.sdk import AirflowException, conf
30
29
  from airflow.utils.helpers import exactly_one
31
30
 
32
31
  if TYPE_CHECKING:
@@ -20,13 +20,12 @@ from collections.abc import Sequence
20
20
  from datetime import timedelta
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftClusterTrigger
27
26
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
- from airflow.providers.common.compat.sdk import AirflowException
28
+ from airflow.providers.common.compat.sdk import AirflowException, conf
30
29
 
31
30
  if TYPE_CHECKING:
32
31
  from airflow.sdk import Context
@@ -25,13 +25,12 @@ from collections.abc import Callable, Sequence
25
25
  from datetime import datetime, timedelta
26
26
  from typing import TYPE_CHECKING, Any, cast
27
27
 
28
- from airflow.configuration import conf
29
28
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
30
29
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
31
30
  from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
32
31
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
33
32
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
34
- from airflow.providers.common.compat.sdk import AirflowException, poke_mode_only
33
+ from airflow.providers.common.compat.sdk import AirflowException, conf, poke_mode_only
35
34
 
36
35
  if TYPE_CHECKING:
37
36
  from airflow.sdk import Context
@@ -23,14 +23,13 @@ from collections.abc import Collection, Sequence
23
23
  from datetime import timedelta
24
24
  from typing import TYPE_CHECKING, Any
25
25
 
26
- from airflow.configuration import conf
27
26
  from airflow.providers.amazon.aws.hooks.sqs import SqsHook
28
27
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
29
28
  from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
30
29
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
31
30
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
32
31
  from airflow.providers.amazon.aws.utils.sqs import process_response
33
- from airflow.providers.common.compat.sdk import AirflowException
32
+ from airflow.providers.common.compat.sdk import AirflowException, conf
34
33
 
35
34
  if TYPE_CHECKING:
36
35
  from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
@@ -20,12 +20,12 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.configuration import conf
24
23
  from airflow.providers.amazon.aws.hooks.ssm import SsmHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
27
26
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
+ from airflow.providers.common.compat.sdk import conf
29
29
 
30
30
  if TYPE_CHECKING:
31
31
  from airflow.sdk import Context
@@ -44,6 +44,11 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
44
44
  :ref:`howto/sensor:SsmRunCommandCompletedSensor`
45
45
 
46
46
  :param command_id: The ID of the AWS SSM Run Command.
47
+ :param fail_on_nonzero_exit: If True (default), the sensor will fail when the command
48
+ returns a non-zero exit code. If False, the sensor will complete successfully
49
+ for both Success and Failed command statuses, allowing downstream tasks to handle
50
+ exit codes. AWS-level failures (Cancelled, TimedOut) will still raise exceptions.
51
+ (default: True)
47
52
  :param deferrable: If True, the sensor will operate in deferrable mode.
48
53
  This mode requires aiobotocore module to be installed.
49
54
  (default: False, but can be overridden in config file by setting
@@ -85,6 +90,7 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
85
90
  self,
86
91
  *,
87
92
  command_id,
93
+ fail_on_nonzero_exit: bool = True,
88
94
  deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
89
95
  poke_interval: int = 120,
90
96
  max_retries: int = 75,
@@ -92,6 +98,7 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
92
98
  ):
93
99
  super().__init__(**kwargs)
94
100
  self.command_id = command_id
101
+ self.fail_on_nonzero_exit = fail_on_nonzero_exit
95
102
  self.deferrable = deferrable
96
103
  self.poke_interval = poke_interval
97
104
  self.max_retries = max_retries
@@ -112,7 +119,19 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
112
119
  state = invocation["Status"]
113
120
 
114
121
  if state in self.FAILURE_STATES:
115
- raise RuntimeError(self.FAILURE_MESSAGE)
122
+ # Check if we should tolerate this failure
123
+ if self.fail_on_nonzero_exit:
124
+ raise RuntimeError(self.FAILURE_MESSAGE) # Traditional behavior
125
+
126
+ # Only fail on AWS-level issues, tolerate command failures
127
+ if SsmHook.is_aws_level_failure(state):
128
+ raise RuntimeError(f"SSM command {self.command_id} {state}")
129
+
130
+ # Command failed but we're tolerating it
131
+ self.log.info(
132
+ "Command invocation has status %s. Continuing due to fail_on_nonzero_exit=False",
133
+ state,
134
+ )
116
135
 
117
136
  if state in self.INTERMEDIATE_STATES:
118
137
  return False
@@ -127,6 +146,7 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
127
146
  waiter_delay=int(self.poke_interval),
128
147
  waiter_max_attempts=self.max_retries,
129
148
  aws_conn_id=self.aws_conn_id,
149
+ fail_on_nonzero_exit=self.fail_on_nonzero_exit,
130
150
  ),
131
151
  method_name="execute_complete",
132
152
  )
@@ -28,7 +28,7 @@ from airflow.providers.common.compat.sdk import BaseOperator
28
28
  try:
29
29
  from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
30
30
  except ModuleNotFoundError as e:
31
- from airflow.exceptions import AirflowOptionalProviderFeatureException
31
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
32
32
 
33
33
  raise AirflowOptionalProviderFeatureException(e)
34
34
 
@@ -183,7 +183,7 @@ class SqlToS3Operator(BaseOperator):
183
183
  import numpy as np
184
184
  import pandas as pd
185
185
  except ImportError as e:
186
- from airflow.exceptions import AirflowOptionalProviderFeatureException
186
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
187
187
 
188
188
  raise AirflowOptionalProviderFeatureException(e)
189
189
 
@@ -351,8 +351,8 @@ class EmrServerlessStartJobTrigger(AwsBaseWaiterTrigger):
351
351
  failure_message="Serverless Job failed",
352
352
  status_message="Serverless Job status is",
353
353
  status_queries=["jobRun.state", "jobRun.stateDetails"],
354
- return_key="job_id",
355
- return_value=job_id,
354
+ return_key="job_details",
355
+ return_value={"application_id": application_id, "job_id": job_id},
356
356
  waiter_delay=waiter_delay,
357
357
  waiter_max_attempts=waiter_max_attempts,
358
358
  aws_conn_id=aws_conn_id,
@@ -35,6 +35,9 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
35
35
  :param command_id: The ID of the AWS SSM Run Command.
36
36
  :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
37
37
  :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
38
+ :param fail_on_nonzero_exit: If True (default), the trigger will fail when the command returns
39
+ a non-zero exit code. If False, the trigger will complete successfully regardless of the
40
+ command exit code. (default: True)
38
41
  :param aws_conn_id: The Airflow connection used for AWS credentials.
39
42
  :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
40
43
  :param verify: Whether or not to verify SSL certificates. See:
@@ -49,18 +52,19 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
49
52
  command_id: str,
50
53
  waiter_delay: int = 120,
51
54
  waiter_max_attempts: int = 75,
55
+ fail_on_nonzero_exit: bool = True,
52
56
  aws_conn_id: str | None = None,
53
57
  region_name: str | None = None,
54
58
  verify: bool | str | None = None,
55
59
  botocore_config: dict | None = None,
56
60
  ) -> None:
57
61
  super().__init__(
58
- serialized_fields={"command_id": command_id},
62
+ serialized_fields={"command_id": command_id, "fail_on_nonzero_exit": fail_on_nonzero_exit},
59
63
  waiter_name="command_executed",
60
64
  waiter_args={"CommandId": command_id},
61
65
  failure_message="SSM run command failed.",
62
66
  status_message="Status of SSM run command is",
63
- status_queries=["status"],
67
+ status_queries=["Status"],
64
68
  return_key="command_id",
65
69
  return_value=command_id,
66
70
  waiter_delay=waiter_delay,
@@ -71,6 +75,7 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
71
75
  botocore_config=botocore_config,
72
76
  )
73
77
  self.command_id = command_id
78
+ self.fail_on_nonzero_exit = fail_on_nonzero_exit
74
79
 
75
80
  def hook(self) -> AwsGenericHook:
76
81
  return SsmHook(
@@ -89,14 +94,66 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
89
94
 
90
95
  for instance_id in instance_ids:
91
96
  self.waiter_args["InstanceId"] = instance_id
92
- await async_wait(
93
- waiter,
94
- self.waiter_delay,
95
- self.attempts,
96
- self.waiter_args,
97
- self.failure_message,
98
- self.status_message,
99
- self.status_queries,
100
- )
97
+ try:
98
+ await async_wait(
99
+ waiter,
100
+ self.waiter_delay,
101
+ self.attempts,
102
+ self.waiter_args,
103
+ self.failure_message,
104
+ self.status_message,
105
+ self.status_queries,
106
+ )
107
+ except Exception:
108
+ # Get detailed invocation information to determine failure type
109
+ invocation = await client.get_command_invocation(
110
+ CommandId=self.command_id, InstanceId=instance_id
111
+ )
112
+ status = invocation.get("Status", "")
113
+ response_code = invocation.get("ResponseCode", -1)
114
+
115
+ # AWS-level failures should always raise
116
+ if SsmHook.is_aws_level_failure(status):
117
+ self.log.error(
118
+ "AWS-level failure for command %s on instance %s: status=%s",
119
+ self.command_id,
120
+ instance_id,
121
+ status,
122
+ )
123
+ raise
124
+
125
+ # Command-level failure (non-zero exit code)
126
+ if not self.fail_on_nonzero_exit:
127
+ # Enhanced mode: tolerate command-level failures
128
+ self.log.info(
129
+ "Command %s completed with status %s (exit code: %s) for instance %s. "
130
+ "Continuing due to fail_on_nonzero_exit=False",
131
+ self.command_id,
132
+ status,
133
+ response_code,
134
+ instance_id,
135
+ )
136
+ continue
137
+ else:
138
+ # Traditional mode: yield failure event instead of raising
139
+ # This allows the operator to handle the failure gracefully
140
+ self.log.warning(
141
+ "Command %s failed with status %s (exit code: %s) for instance %s",
142
+ self.command_id,
143
+ status,
144
+ response_code,
145
+ instance_id,
146
+ )
147
+ yield TriggerEvent(
148
+ {
149
+ "status": "failed",
150
+ "message": f"Command failed with status {status} (exit code: {response_code})",
151
+ "command_status": status,
152
+ "exit_code": response_code,
153
+ "instance_id": instance_id,
154
+ self.return_key: self.return_value,
155
+ }
156
+ )
157
+ return
101
158
 
102
159
  yield TriggerEvent({"status": "success", self.return_key: self.return_value})
@@ -1207,7 +1207,7 @@ def get_provider_info():
1207
1207
  "example": "True",
1208
1208
  "default": "True",
1209
1209
  },
1210
- "max_run_task_attempts": {
1210
+ "max_invoke_attempts": {
1211
1211
  "description": "The maximum number of times the Lambda Executor should attempt to start an Airflow task.\n",
1212
1212
  "version_added": "9.9.0",
1213
1213
  "type": "integer",
@@ -1390,5 +1390,6 @@ def get_provider_info():
1390
1390
  },
1391
1391
  "executors": ["airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"],
1392
1392
  "auth-managers": ["airflow.providers.amazon.aws.auth_manager.aws_auth_manager.AwsAuthManager"],
1393
+ "cli": ["airflow.providers.amazon.aws.cli.definition.get_aws_cli_commands"],
1393
1394
  "queues": ["airflow.providers.amazon.aws.queues.sqs.SqsMessageQueueProvider"],
1394
1395
  }