apache-airflow-providers-amazon 9.15.0__py3-none-any.whl → 9.18.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138) hide show
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -1
  3. airflow/providers/amazon/aws/auth_manager/routes/login.py +7 -1
  4. airflow/providers/amazon/aws/bundles/s3.py +1 -1
  5. airflow/providers/amazon/aws/exceptions.py +1 -1
  6. airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py +5 -1
  7. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +1 -2
  8. airflow/providers/amazon/aws/executors/batch/batch_executor.py +1 -2
  9. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -2
  10. airflow/providers/amazon/aws/hooks/athena.py +7 -3
  11. airflow/providers/amazon/aws/hooks/athena_sql.py +3 -3
  12. airflow/providers/amazon/aws/hooks/base_aws.py +3 -7
  13. airflow/providers/amazon/aws/hooks/batch_client.py +5 -7
  14. airflow/providers/amazon/aws/hooks/batch_waiters.py +1 -2
  15. airflow/providers/amazon/aws/hooks/chime.py +2 -2
  16. airflow/providers/amazon/aws/hooks/comprehend.py +1 -1
  17. airflow/providers/amazon/aws/hooks/datasync.py +3 -3
  18. airflow/providers/amazon/aws/hooks/dynamodb.py +1 -1
  19. airflow/providers/amazon/aws/hooks/ec2.py +1 -1
  20. airflow/providers/amazon/aws/hooks/elasticache_replication_group.py +1 -1
  21. airflow/providers/amazon/aws/hooks/emr.py +1 -1
  22. airflow/providers/amazon/aws/hooks/firehose.py +56 -0
  23. airflow/providers/amazon/aws/hooks/glue.py +9 -2
  24. airflow/providers/amazon/aws/hooks/glue_catalog.py +1 -1
  25. airflow/providers/amazon/aws/hooks/kinesis.py +31 -13
  26. airflow/providers/amazon/aws/hooks/logs.py +10 -2
  27. airflow/providers/amazon/aws/hooks/mwaa.py +38 -7
  28. airflow/providers/amazon/aws/hooks/quicksight.py +1 -1
  29. airflow/providers/amazon/aws/hooks/rds.py +1 -1
  30. airflow/providers/amazon/aws/hooks/redshift_sql.py +31 -8
  31. airflow/providers/amazon/aws/hooks/s3.py +14 -6
  32. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
  33. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +1 -2
  34. airflow/providers/amazon/aws/hooks/ssm.py +34 -6
  35. airflow/providers/amazon/aws/hooks/step_function.py +1 -1
  36. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  37. airflow/providers/amazon/aws/links/emr.py +1 -1
  38. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +50 -20
  39. airflow/providers/amazon/aws/operators/appflow.py +1 -1
  40. airflow/providers/amazon/aws/operators/athena.py +1 -1
  41. airflow/providers/amazon/aws/operators/base_aws.py +2 -2
  42. airflow/providers/amazon/aws/operators/batch.py +1 -1
  43. airflow/providers/amazon/aws/operators/bedrock.py +3 -1
  44. airflow/providers/amazon/aws/operators/cloud_formation.py +2 -2
  45. airflow/providers/amazon/aws/operators/comprehend.py +1 -1
  46. airflow/providers/amazon/aws/operators/datasync.py +1 -1
  47. airflow/providers/amazon/aws/operators/dms.py +1 -1
  48. airflow/providers/amazon/aws/operators/ec2.py +1 -1
  49. airflow/providers/amazon/aws/operators/ecs.py +1 -1
  50. airflow/providers/amazon/aws/operators/eks.py +2 -1
  51. airflow/providers/amazon/aws/operators/emr.py +22 -12
  52. airflow/providers/amazon/aws/operators/eventbridge.py +1 -1
  53. airflow/providers/amazon/aws/operators/glue.py +1 -1
  54. airflow/providers/amazon/aws/operators/glue_crawler.py +1 -1
  55. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -1
  56. airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -1
  57. airflow/providers/amazon/aws/operators/lambda_function.py +1 -1
  58. airflow/providers/amazon/aws/operators/mwaa.py +13 -4
  59. airflow/providers/amazon/aws/operators/neptune.py +1 -1
  60. airflow/providers/amazon/aws/operators/rds.py +1 -1
  61. airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -1
  62. airflow/providers/amazon/aws/operators/redshift_data.py +1 -1
  63. airflow/providers/amazon/aws/operators/s3.py +1 -1
  64. airflow/providers/amazon/aws/operators/sagemaker.py +1 -1
  65. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -2
  66. airflow/providers/amazon/aws/operators/ssm.py +122 -17
  67. airflow/providers/amazon/aws/operators/step_function.py +1 -1
  68. airflow/providers/amazon/aws/secrets/secrets_manager.py +3 -4
  69. airflow/providers/amazon/aws/sensors/athena.py +1 -1
  70. airflow/providers/amazon/aws/sensors/base_aws.py +2 -2
  71. airflow/providers/amazon/aws/sensors/batch.py +1 -1
  72. airflow/providers/amazon/aws/sensors/bedrock.py +1 -1
  73. airflow/providers/amazon/aws/sensors/comprehend.py +1 -1
  74. airflow/providers/amazon/aws/sensors/dms.py +1 -1
  75. airflow/providers/amazon/aws/sensors/ec2.py +1 -1
  76. airflow/providers/amazon/aws/sensors/ecs.py +1 -1
  77. airflow/providers/amazon/aws/sensors/eks.py +2 -1
  78. airflow/providers/amazon/aws/sensors/emr.py +1 -3
  79. airflow/providers/amazon/aws/sensors/glacier.py +1 -1
  80. airflow/providers/amazon/aws/sensors/glue.py +1 -1
  81. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -1
  82. airflow/providers/amazon/aws/sensors/glue_crawler.py +1 -1
  83. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -1
  84. airflow/providers/amazon/aws/sensors/lambda_function.py +1 -1
  85. airflow/providers/amazon/aws/sensors/mwaa.py +15 -2
  86. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -1
  87. airflow/providers/amazon/aws/sensors/quicksight.py +1 -1
  88. airflow/providers/amazon/aws/sensors/rds.py +1 -1
  89. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -1
  90. airflow/providers/amazon/aws/sensors/s3.py +3 -3
  91. airflow/providers/amazon/aws/sensors/sagemaker.py +1 -1
  92. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -2
  93. airflow/providers/amazon/aws/sensors/sqs.py +1 -1
  94. airflow/providers/amazon/aws/sensors/ssm.py +33 -17
  95. airflow/providers/amazon/aws/sensors/step_function.py +1 -1
  96. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +3 -3
  97. airflow/providers/amazon/aws/transfers/base.py +5 -5
  98. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +4 -4
  99. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
  100. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
  101. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +48 -6
  102. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
  103. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +2 -5
  104. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -1
  105. airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
  106. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
  107. airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
  108. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
  109. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +6 -7
  110. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -2
  111. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
  112. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +6 -7
  113. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
  114. airflow/providers/amazon/aws/transfers/s3_to_sql.py +1 -2
  115. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
  116. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
  117. airflow/providers/amazon/aws/transfers/sql_to_s3.py +8 -9
  118. airflow/providers/amazon/aws/triggers/bedrock.py +1 -1
  119. airflow/providers/amazon/aws/triggers/ecs.py +1 -1
  120. airflow/providers/amazon/aws/triggers/eks.py +1 -1
  121. airflow/providers/amazon/aws/triggers/s3.py +29 -2
  122. airflow/providers/amazon/aws/triggers/sagemaker.py +1 -1
  123. airflow/providers/amazon/aws/triggers/sqs.py +1 -1
  124. airflow/providers/amazon/aws/triggers/ssm.py +17 -1
  125. airflow/providers/amazon/aws/utils/__init__.py +1 -1
  126. airflow/providers/amazon/aws/utils/connection_wrapper.py +3 -6
  127. airflow/providers/amazon/aws/utils/mixins.py +1 -1
  128. airflow/providers/amazon/aws/utils/waiter.py +3 -3
  129. airflow/providers/amazon/aws/utils/waiter_with_logging.py +1 -1
  130. airflow/providers/amazon/aws/waiters/emr.json +6 -6
  131. airflow/providers/amazon/get_provider_info.py +19 -1
  132. airflow/providers/amazon/version_compat.py +19 -16
  133. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/METADATA +29 -19
  134. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/RECORD +138 -136
  135. apache_airflow_providers_amazon-9.18.1rc1.dist-info/licenses/NOTICE +5 -0
  136. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/WHEEL +0 -0
  137. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/entry_points.txt +0 -0
  138. {airflow/providers/amazon → apache_airflow_providers_amazon-9.18.1rc1.dist-info/licenses}/LICENSE +0 -0
@@ -21,11 +21,11 @@ from datetime import timedelta
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
+ from airflow.providers.common.compat.sdk import AirflowException
29
29
 
30
30
  if TYPE_CHECKING:
31
31
  from airflow.utils.context import Context
@@ -22,7 +22,6 @@ from collections.abc import Sequence
22
22
  from typing import TYPE_CHECKING, Any, TypeVar
23
23
 
24
24
  from airflow.configuration import conf
25
- from airflow.exceptions import AirflowException
26
25
  from airflow.providers.amazon.aws.hooks.bedrock import BedrockAgentHook, BedrockHook
27
26
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
28
27
  from airflow.providers.amazon.aws.triggers.bedrock import (
@@ -34,6 +33,7 @@ from airflow.providers.amazon.aws.triggers.bedrock import (
34
33
  BedrockProvisionModelThroughputCompletedTrigger,
35
34
  )
36
35
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
36
+ from airflow.providers.common.compat.sdk import AirflowException
37
37
 
38
38
  if TYPE_CHECKING:
39
39
  from airflow.providers.amazon.aws.triggers.bedrock import BedrockBaseBatchInferenceTrigger
@@ -21,7 +21,6 @@ from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.comprehend import (
@@ -29,6 +28,7 @@ from airflow.providers.amazon.aws.triggers.comprehend import (
29
28
  ComprehendPiiEntitiesDetectionJobCompletedTrigger,
30
29
  )
31
30
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
31
+ from airflow.providers.common.compat.sdk import AirflowException
32
32
 
33
33
  if TYPE_CHECKING:
34
34
  from airflow.utils.context import Context
@@ -20,10 +20,10 @@ from __future__ import annotations
20
20
  from collections.abc import Iterable, Sequence
21
21
  from typing import TYPE_CHECKING
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.dms import DmsHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
+ from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  if TYPE_CHECKING:
29
29
  from airflow.utils.context import Context
@@ -21,12 +21,12 @@ from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.ec2 import EC2StateSensorTrigger
28
27
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
29
28
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
+ from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
32
  from airflow.utils.context import Context
@@ -20,7 +20,6 @@ from collections.abc import Sequence
20
20
  from functools import cached_property
21
21
  from typing import TYPE_CHECKING
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.ecs import (
25
24
  EcsClusterStates,
26
25
  EcsHook,
@@ -29,6 +28,7 @@ from airflow.providers.amazon.aws.hooks.ecs import (
29
28
  )
30
29
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
31
30
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
31
+ from airflow.providers.common.compat.sdk import AirflowException
32
32
 
33
33
  if TYPE_CHECKING:
34
34
  import boto3
@@ -23,7 +23,7 @@ from abc import abstractmethod
23
23
  from collections.abc import Sequence
24
24
  from typing import TYPE_CHECKING
25
25
 
26
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
26
+ from airflow.exceptions import AirflowProviderDeprecationWarning
27
27
  from airflow.providers.amazon.aws.hooks.eks import (
28
28
  ClusterStates,
29
29
  EksHook,
@@ -32,6 +32,7 @@ from airflow.providers.amazon.aws.hooks.eks import (
32
32
  )
33
33
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
34
34
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
35
+ from airflow.providers.common.compat.sdk import AirflowException
35
36
 
36
37
  if TYPE_CHECKING:
37
38
  from airflow.utils.context import Context
@@ -22,9 +22,6 @@ from datetime import timedelta
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
24
  from airflow.configuration import conf
25
- from airflow.exceptions import (
26
- AirflowException,
27
- )
28
25
  from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
29
26
  from airflow.providers.amazon.aws.links.emr import EmrClusterLink, EmrLogsLink, get_log_uri
30
27
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
@@ -35,6 +32,7 @@ from airflow.providers.amazon.aws.triggers.emr import (
35
32
  )
36
33
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
37
34
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
35
+ from airflow.providers.common.compat.sdk import AirflowException
38
36
 
39
37
  if TYPE_CHECKING:
40
38
  from airflow.utils.context import Context
@@ -21,10 +21,10 @@ from collections.abc import Sequence
21
21
  from enum import Enum
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
+ from airflow.providers.common.compat.sdk import AirflowException
28
28
 
29
29
  if TYPE_CHECKING:
30
30
  from airflow.utils.context import Context
@@ -21,7 +21,6 @@ from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.glue import GlueDataQualityHook, GlueJobHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.glue import (
@@ -31,6 +30,7 @@ from airflow.providers.amazon.aws.triggers.glue import (
31
30
  )
32
31
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
33
32
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
33
+ from airflow.providers.common.compat.sdk import AirflowException
34
34
 
35
35
  if TYPE_CHECKING:
36
36
  from airflow.utils.context import Context
@@ -22,12 +22,12 @@ from datetime import timedelta
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
24
  from airflow.configuration import conf
25
- from airflow.exceptions import AirflowException
26
25
  from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
27
26
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
28
27
  from airflow.providers.amazon.aws.triggers.glue import GlueCatalogPartitionTrigger
29
28
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
30
29
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
+ from airflow.providers.common.compat.sdk import AirflowException
31
31
 
32
32
  if TYPE_CHECKING:
33
33
  from airflow.utils.context import Context
@@ -20,10 +20,10 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
+ from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  if TYPE_CHECKING:
29
29
  from airflow.utils.context import Context
@@ -20,13 +20,13 @@ from collections.abc import Sequence
20
20
  from typing import TYPE_CHECKING, Any
21
21
 
22
22
  from airflow.configuration import conf
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.kinesis_analytics import KinesisAnalyticsV2Hook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.kinesis_analytics import (
27
26
  KinesisAnalyticsV2ApplicationOperationCompleteTrigger,
28
27
  )
29
28
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
+ from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
32
  from airflow.utils.context import Context
@@ -20,11 +20,11 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.utils import trim_none_values
27
26
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
+ from airflow.providers.common.compat.sdk import AirflowException
28
28
 
29
29
  if TYPE_CHECKING:
30
30
  from airflow.utils.context import Context
@@ -18,14 +18,14 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  from collections.abc import Collection, Sequence
21
- from typing import TYPE_CHECKING, Any
21
+ from typing import TYPE_CHECKING, Any, Literal
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.mwaa import MwaaDagRunCompletedTrigger, MwaaTaskCompletedTrigger
28
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
+ from airflow.providers.common.compat.sdk import AirflowException
29
29
  from airflow.utils.state import DagRunState, TaskInstanceState
30
30
 
31
31
  if TYPE_CHECKING:
@@ -51,6 +51,8 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
51
51
  ``{airflow.utils.state.DagRunState.SUCCESS}`` (templated)
52
52
  :param failure_states: Collection of DAG Run states that would make this task marked as failed and raise an
53
53
  AirflowException, default is ``{airflow.utils.state.DagRunState.FAILED}`` (templated)
54
+ :param airflow_version: The Airflow major version the MWAA environment runs.
55
+ This parameter is only used if the local web token method is used to call Airflow API. (templated)
54
56
  :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
55
57
  module to be installed.
56
58
  (default: False, but can be overridden in config file by setting default_deferrable to True)
@@ -75,6 +77,7 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
75
77
  "external_dag_run_id",
76
78
  "success_states",
77
79
  "failure_states",
80
+ "airflow_version",
78
81
  "deferrable",
79
82
  "max_retries",
80
83
  "poke_interval",
@@ -88,6 +91,7 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
88
91
  external_dag_run_id: str,
89
92
  success_states: Collection[str] | None = None,
90
93
  failure_states: Collection[str] | None = None,
94
+ airflow_version: Literal[2, 3] | None = None,
91
95
  deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
92
96
  poke_interval: int = 60,
93
97
  max_retries: int = 720,
@@ -104,6 +108,7 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
104
108
  self.external_env_name = external_env_name
105
109
  self.external_dag_id = external_dag_id
106
110
  self.external_dag_run_id = external_dag_run_id
111
+ self.airflow_version = airflow_version
107
112
  self.deferrable = deferrable
108
113
  self.poke_interval = poke_interval
109
114
  self.max_retries = max_retries
@@ -119,6 +124,7 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
119
124
  env_name=self.external_env_name,
120
125
  path=f"/dags/{self.external_dag_id}/dagRuns/{self.external_dag_run_id}",
121
126
  method="GET",
127
+ airflow_version=self.airflow_version,
122
128
  )
123
129
 
124
130
  # If RestApiStatusCode == 200, the RestApiResponse must have the "state" key, otherwise something terrible has
@@ -179,6 +185,8 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
179
185
  ``{airflow.utils.state.TaskInstanceState.SUCCESS}`` (templated)
180
186
  :param failure_states: Collection of task instance states that would make this task marked as failed and raise an
181
187
  AirflowException, default is ``{airflow.utils.state.TaskInstanceState.FAILED}`` (templated)
188
+ :param airflow_version: The Airflow major version the MWAA environment runs.
189
+ This parameter is only used if the local web token method is used to call Airflow API. (templated)
182
190
  :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
183
191
  module to be installed.
184
192
  (default: False, but can be overridden in config file by setting default_deferrable to True)
@@ -204,6 +212,7 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
204
212
  "external_task_id",
205
213
  "success_states",
206
214
  "failure_states",
215
+ "airflow_version",
207
216
  "deferrable",
208
217
  "max_retries",
209
218
  "poke_interval",
@@ -218,6 +227,7 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
218
227
  external_task_id: str,
219
228
  success_states: Collection[str] | None = None,
220
229
  failure_states: Collection[str] | None = None,
230
+ airflow_version: Literal[2, 3] | None = None,
221
231
  deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
222
232
  poke_interval: int = 60,
223
233
  max_retries: int = 720,
@@ -235,6 +245,7 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
235
245
  self.external_dag_id = external_dag_id
236
246
  self.external_dag_run_id = external_dag_run_id
237
247
  self.external_task_id = external_task_id
248
+ self.airflow_version = airflow_version
238
249
  self.deferrable = deferrable
239
250
  self.poke_interval = poke_interval
240
251
  self.max_retries = max_retries
@@ -252,6 +263,7 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
252
263
  env_name=self.external_env_name,
253
264
  path=f"/dags/{self.external_dag_id}/dagRuns/{self.external_dag_run_id}/taskInstances/{self.external_task_id}",
254
265
  method="GET",
266
+ airflow_version=self.airflow_version,
255
267
  )
256
268
  # If RestApiStatusCode == 200, the RestApiResponse must have the "state" key, otherwise something terrible has
257
269
  # happened in the API and KeyError would be raised
@@ -278,6 +290,7 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
278
290
  env_name=self.external_env_name,
279
291
  path=f"/dags/{self.external_dag_id}/dagRuns",
280
292
  method="GET",
293
+ airflow_version=self.airflow_version,
281
294
  )
282
295
  self.external_dag_run_id = response["RestApiResponse"]["dag_runs"][-1]["dag_run_id"]
283
296
 
@@ -20,13 +20,13 @@ from collections.abc import Sequence
20
20
  from typing import TYPE_CHECKING, Any
21
21
 
22
22
  from airflow.configuration import conf
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.opensearch_serverless import OpenSearchServerlessHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.triggers.opensearch_serverless import (
27
26
  OpenSearchServerlessCollectionActiveTrigger,
28
27
  )
29
28
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
+ from airflow.providers.common.compat.sdk import AirflowException
30
30
  from airflow.utils.helpers import exactly_one
31
31
 
32
32
  if TYPE_CHECKING:
@@ -20,9 +20,9 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
25
+ from airflow.providers.common.compat.sdk import AirflowException
26
26
 
27
27
  if TYPE_CHECKING:
28
28
  from airflow.utils.context import Context
@@ -19,11 +19,11 @@ from __future__ import annotations
19
19
  from collections.abc import Sequence
20
20
  from typing import TYPE_CHECKING
21
21
 
22
- from airflow.exceptions import AirflowException, AirflowNotFoundException
23
22
  from airflow.providers.amazon.aws.hooks.rds import RdsHook
24
23
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
25
24
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
25
  from airflow.providers.amazon.aws.utils.rds import RdsDbType
26
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException
27
27
 
28
28
  if TYPE_CHECKING:
29
29
  from airflow.utils.context import Context
@@ -21,12 +21,12 @@ from datetime import timedelta
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftClusterTrigger
28
27
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
29
28
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
+ from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
32
  from airflow.utils.context import Context
@@ -31,12 +31,11 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
31
31
  if TYPE_CHECKING:
32
32
  from airflow.utils.context import Context
33
33
 
34
- from airflow.exceptions import AirflowException
35
34
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
36
35
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
37
36
  from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
38
37
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
39
- from airflow.sensors.base import poke_mode_only
38
+ from airflow.providers.common.compat.sdk import AirflowException, poke_mode_only
40
39
 
41
40
 
42
41
  class S3KeySensor(AwsBaseSensor[S3Hook]):
@@ -215,6 +214,7 @@ class S3KeySensor(AwsBaseSensor[S3Hook]):
215
214
  poke_interval=self.poke_interval,
216
215
  should_check_fn=bool(self.check_fn),
217
216
  use_regex=self.use_regex,
217
+ metadata_keys=self.metadata_keys,
218
218
  ),
219
219
  method_name="execute_complete",
220
220
  )
@@ -226,7 +226,7 @@ class S3KeySensor(AwsBaseSensor[S3Hook]):
226
226
  Relies on trigger to throw an exception, otherwise it assumes execution was successful.
227
227
  """
228
228
  if event["status"] == "running":
229
- found_keys = self.check_fn(event["files"]) # type: ignore[misc]
229
+ found_keys = self.check_fn(event["files"], **context) # type: ignore[misc]
230
230
  if not found_keys:
231
231
  self._defer()
232
232
  elif event["status"] == "error":
@@ -20,10 +20,10 @@ import time
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
+ from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  if TYPE_CHECKING:
29
29
  from airflow.utils.context import Context
@@ -21,11 +21,10 @@ from __future__ import annotations
21
21
 
22
22
  from typing import TYPE_CHECKING
23
23
 
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.sagemaker_unified_studio import (
26
25
  SageMakerNotebookHook,
27
26
  )
28
- from airflow.providers.amazon.version_compat import BaseSensorOperator
27
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
29
28
 
30
29
  if TYPE_CHECKING:
31
30
  from airflow.utils.context import Context
@@ -24,13 +24,13 @@ from datetime import timedelta
24
24
  from typing import TYPE_CHECKING, Any
25
25
 
26
26
  from airflow.configuration import conf
27
- from airflow.exceptions import AirflowException
28
27
  from airflow.providers.amazon.aws.hooks.sqs import SqsHook
29
28
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
30
29
  from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
31
30
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
32
31
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
33
32
  from airflow.providers.amazon.aws.utils.sqs import process_response
33
+ from airflow.providers.common.compat.sdk import AirflowException
34
34
 
35
35
  if TYPE_CHECKING:
36
36
  from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
@@ -21,7 +21,6 @@ from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.providers.amazon.aws.hooks.ssm import SsmHook
26
25
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
26
  from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
@@ -34,32 +33,45 @@ if TYPE_CHECKING:
34
33
 
35
34
  class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
36
35
  """
37
- Poll the state of an AWS SSM Run Command until all instance jobs reach a terminal state. Fails if any instance job ends in a failed state.
36
+ Poll the state of an AWS SSM Run Command until completion.
37
+
38
+ Waits until all instance jobs reach a terminal state. Fails if any
39
+ instance job ends in a failed state.
38
40
 
39
41
  .. seealso::
40
- For more information on how to use this sensor, take a look at the guide:
42
+ For more information on how to use this sensor, take a look at the
43
+ guide:
41
44
  :ref:`howto/sensor:SsmRunCommandCompletedSensor`
42
45
 
43
46
  :param command_id: The ID of the AWS SSM Run Command.
44
-
45
- :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
46
- module to be installed.
47
- (default: False, but can be overridden in config file by setting default_deferrable to True)
48
- :param poke_interval: Polling period in seconds to check for the status of the job. (default: 120)
49
- :param max_retries: Number of times before returning the current state. (default: 75)
47
+ :param deferrable: If True, the sensor will operate in deferrable mode.
48
+ This mode requires aiobotocore module to be installed.
49
+ (default: False, but can be overridden in config file by setting
50
+ default_deferrable to True)
51
+ :param poke_interval: Polling period in seconds to check for the status
52
+ of the job. (default: 120)
53
+ :param max_retries: Number of times before returning the current state.
54
+ (default: 75)
50
55
  :param aws_conn_id: The Airflow connection used for AWS credentials.
51
- If this is ``None`` or empty then the default boto3 behaviour is used. If
52
- running Airflow in a distributed manner and aws_conn_id is None or
56
+ If this is ``None`` or empty then the default boto3 behaviour is used.
57
+ If running Airflow in a distributed manner and aws_conn_id is None or
53
58
  empty, then default boto3 configuration would be used (and must be
54
59
  maintained on each worker node).
55
- :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
60
+ :param region_name: AWS region_name. If not specified then the default
61
+ boto3 behaviour is used.
56
62
  :param verify: Whether or not to verify SSL certificates. See:
57
63
  https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
58
- :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
64
+ :param botocore_config: Configuration dictionary (key-values) for botocore
65
+ client. See:
59
66
  https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
60
67
  """
61
68
 
62
- INTERMEDIATE_STATES: tuple[str, ...] = ("Pending", "Delayed", "InProgress", "Cancelling")
69
+ INTERMEDIATE_STATES: tuple[str, ...] = (
70
+ "Pending",
71
+ "Delayed",
72
+ "InProgress",
73
+ "Cancelling",
74
+ )
63
75
  FAILURE_STATES: tuple[str, ...] = ("Cancelled", "TimedOut", "Failed")
64
76
  SUCCESS_STATES: tuple[str, ...] = ("Success",)
65
77
  FAILURE_MESSAGE = "SSM run command sensor failed."
@@ -89,14 +101,18 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
89
101
  command_invocations = response.get("CommandInvocations", [])
90
102
 
91
103
  if not command_invocations:
92
- self.log.info("No command invocations found for command_id=%s yet, waiting...", self.command_id)
104
+ self.log.info(
105
+ "No command invocations found for command_id=%s yet, waiting...",
106
+ self.command_id,
107
+ )
93
109
  return False
94
110
 
95
111
  for invocation in command_invocations:
96
112
  state = invocation["Status"]
97
113
 
98
114
  if state in self.FAILURE_STATES:
99
- raise AirflowException(self.FAILURE_MESSAGE)
115
+ raise RuntimeError(self.FAILURE_MESSAGE)
100
116
 
101
117
  if state in self.INTERMEDIATE_STATES:
102
118
  return False
@@ -122,6 +138,6 @@ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
122
138
  event = validate_execute_complete_event(event)
123
139
 
124
140
  if event["status"] != "success":
125
- raise AirflowException(f"Error while running run command: {event}")
141
+ raise RuntimeError(f"Error while running run command: {event}")
126
142
 
127
143
  self.log.info("SSM run command `%s` completed.", event["command_id"])
@@ -20,10 +20,10 @@ import json
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING
22
22
 
23
- from airflow.exceptions import AirflowException
24
23
  from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
25
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
25
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
+ from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  if TYPE_CHECKING:
29
29
  from airflow.utils.context import Context
@@ -23,7 +23,7 @@ from collections.abc import Sequence
23
23
  from typing import TYPE_CHECKING
24
24
 
25
25
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
26
- from airflow.providers.amazon.version_compat import BaseOperator
26
+ from airflow.providers.common.compat.sdk import BaseOperator
27
27
 
28
28
  try:
29
29
  from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
@@ -49,12 +49,12 @@ class AzureBlobStorageToS3Operator(BaseOperator):
49
49
  :param prefix: Prefix string which filters objects whose name begin with
50
50
  this prefix. (templated)
51
51
  :param delimiter: The delimiter by which you want to filter the objects. (templated)
52
- For e.g to lists the CSV files from in a directory in GCS you would use
52
+ For e.g. to list the CSV files in a directory in Azure Blob Storage you would use
53
53
  delimiter='.csv'.
54
54
  :param aws_conn_id: Connection id of the S3 connection to use
55
55
  :param dest_s3_key: The base S3 key to be used to store the files. (templated)
56
56
  :param dest_verify: Whether or not to verify SSL certificates for S3 connection.
57
- By default SSL certificates are verified.
57
+ By default, SSL certificates are verified.
58
58
  You can provide the following values:
59
59
 
60
60
  - ``False``: do not validate SSL certificates. SSL will still be used
@@ -22,8 +22,8 @@ from __future__ import annotations
22
22
  from collections.abc import Sequence
23
23
 
24
24
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
25
- from airflow.providers.amazon.version_compat import BaseOperator
26
- from airflow.utils.types import NOTSET, ArgNotSet
25
+ from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet, is_arg_set
26
+ from airflow.providers.common.compat.sdk import BaseOperator
27
27
 
28
28
 
29
29
  class AwsToAwsBaseOperator(BaseOperator):
@@ -55,7 +55,7 @@ class AwsToAwsBaseOperator(BaseOperator):
55
55
  self.source_aws_conn_id = source_aws_conn_id
56
56
  self.dest_aws_conn_id = dest_aws_conn_id
57
57
  self.source_aws_conn_id = source_aws_conn_id
58
- if isinstance(dest_aws_conn_id, ArgNotSet):
59
- self.dest_aws_conn_id = self.source_aws_conn_id
60
- else:
58
+ if is_arg_set(dest_aws_conn_id):
61
59
  self.dest_aws_conn_id = dest_aws_conn_id
60
+ else:
61
+ self.dest_aws_conn_id = self.source_aws_conn_id
@@ -36,8 +36,8 @@ from airflow.providers.amazon.aws.transfers.base import AwsToAwsBaseOperator
36
36
  from airflow.utils.helpers import prune_dict
37
37
 
38
38
  if TYPE_CHECKING:
39
- from airflow.utils.context import Context
40
- from airflow.utils.types import ArgNotSet
39
+ from airflow.providers.amazon.version_compat import ArgNotSet
40
+ from airflow.sdk import Context
41
41
 
42
42
 
43
43
  class JSONEncoder(json.JSONEncoder):
@@ -216,9 +216,9 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
216
216
  scan_kwargs = copy(self.dynamodb_scan_kwargs) if self.dynamodb_scan_kwargs else {}
217
217
  err = None
218
218
  f: IO[Any]
219
- with NamedTemporaryFile() as f:
219
+ with NamedTemporaryFile() as f_tmp:
220
220
  try:
221
- f = self._scan_dynamodb_and_upload_to_s3(f, scan_kwargs, table)
221
+ f = self._scan_dynamodb_and_upload_to_s3(f_tmp, scan_kwargs, table)
222
222
  except Exception as e:
223
223
  err = e
224
224
  raise e
@@ -24,7 +24,7 @@ from tempfile import NamedTemporaryFile
24
24
  from typing import TYPE_CHECKING
25
25
 
26
26
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
27
- from airflow.providers.amazon.version_compat import BaseOperator
27
+ from airflow.providers.common.compat.sdk import BaseOperator
28
28
  from airflow.providers.exasol.hooks.exasol import ExasolHook
29
29
 
30
30
  if TYPE_CHECKING: