apache-airflow-providers-amazon 9.1.0rc4__py3-none-any.whl → 9.2.0rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (116)
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +2 -1
  3. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +4 -12
  4. airflow/providers/amazon/aws/executors/batch/batch_executor.py +4 -3
  5. airflow/providers/amazon/aws/executors/batch/utils.py +3 -3
  6. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +2 -1
  7. airflow/providers/amazon/aws/executors/ecs/utils.py +3 -3
  8. airflow/providers/amazon/aws/fs/s3.py +2 -2
  9. airflow/providers/amazon/aws/hooks/appflow.py +15 -5
  10. airflow/providers/amazon/aws/hooks/athena.py +2 -1
  11. airflow/providers/amazon/aws/hooks/dms.py +161 -0
  12. airflow/providers/amazon/aws/hooks/dynamodb.py +2 -1
  13. airflow/providers/amazon/aws/hooks/eks.py +2 -1
  14. airflow/providers/amazon/aws/hooks/kinesis.py +1 -1
  15. airflow/providers/amazon/aws/hooks/logs.py +2 -1
  16. airflow/providers/amazon/aws/hooks/redshift_cluster.py +4 -3
  17. airflow/providers/amazon/aws/hooks/redshift_data.py +2 -1
  18. airflow/providers/amazon/aws/hooks/redshift_sql.py +2 -6
  19. airflow/providers/amazon/aws/hooks/s3.py +7 -1
  20. airflow/providers/amazon/aws/hooks/sagemaker.py +2 -1
  21. airflow/providers/amazon/aws/hooks/ses.py +2 -1
  22. airflow/providers/amazon/aws/notifications/sns.py +1 -1
  23. airflow/providers/amazon/aws/notifications/sqs.py +1 -1
  24. airflow/providers/amazon/aws/operators/athena.py +2 -1
  25. airflow/providers/amazon/aws/operators/base_aws.py +1 -1
  26. airflow/providers/amazon/aws/operators/batch.py +2 -1
  27. airflow/providers/amazon/aws/operators/bedrock.py +2 -1
  28. airflow/providers/amazon/aws/operators/cloud_formation.py +2 -1
  29. airflow/providers/amazon/aws/operators/comprehend.py +2 -1
  30. airflow/providers/amazon/aws/operators/datasync.py +2 -1
  31. airflow/providers/amazon/aws/operators/dms.py +531 -1
  32. airflow/providers/amazon/aws/operators/ec2.py +2 -1
  33. airflow/providers/amazon/aws/operators/ecs.py +4 -1
  34. airflow/providers/amazon/aws/operators/eks.py +4 -3
  35. airflow/providers/amazon/aws/operators/emr.py +31 -8
  36. airflow/providers/amazon/aws/operators/eventbridge.py +2 -1
  37. airflow/providers/amazon/aws/operators/glacier.py +2 -1
  38. airflow/providers/amazon/aws/operators/glue.py +2 -1
  39. airflow/providers/amazon/aws/operators/glue_crawler.py +2 -1
  40. airflow/providers/amazon/aws/operators/glue_databrew.py +2 -1
  41. airflow/providers/amazon/aws/operators/kinesis_analytics.py +2 -1
  42. airflow/providers/amazon/aws/operators/lambda_function.py +2 -1
  43. airflow/providers/amazon/aws/operators/neptune.py +2 -1
  44. airflow/providers/amazon/aws/operators/quicksight.py +2 -1
  45. airflow/providers/amazon/aws/operators/rds.py +2 -1
  46. airflow/providers/amazon/aws/operators/redshift_cluster.py +2 -1
  47. airflow/providers/amazon/aws/operators/s3.py +7 -1
  48. airflow/providers/amazon/aws/operators/sagemaker.py +2 -1
  49. airflow/providers/amazon/aws/operators/sns.py +2 -1
  50. airflow/providers/amazon/aws/operators/sqs.py +2 -1
  51. airflow/providers/amazon/aws/operators/step_function.py +2 -1
  52. airflow/providers/amazon/aws/sensors/athena.py +2 -1
  53. airflow/providers/amazon/aws/sensors/base_aws.py +1 -1
  54. airflow/providers/amazon/aws/sensors/batch.py +2 -1
  55. airflow/providers/amazon/aws/sensors/bedrock.py +2 -1
  56. airflow/providers/amazon/aws/sensors/cloud_formation.py +2 -1
  57. airflow/providers/amazon/aws/sensors/comprehend.py +2 -1
  58. airflow/providers/amazon/aws/sensors/dms.py +2 -1
  59. airflow/providers/amazon/aws/sensors/dynamodb.py +2 -1
  60. airflow/providers/amazon/aws/sensors/ec2.py +2 -1
  61. airflow/providers/amazon/aws/sensors/ecs.py +2 -1
  62. airflow/providers/amazon/aws/sensors/eks.py +2 -1
  63. airflow/providers/amazon/aws/sensors/emr.py +2 -1
  64. airflow/providers/amazon/aws/sensors/glacier.py +2 -1
  65. airflow/providers/amazon/aws/sensors/glue.py +2 -1
  66. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +2 -1
  67. airflow/providers/amazon/aws/sensors/glue_crawler.py +2 -1
  68. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +2 -1
  69. airflow/providers/amazon/aws/sensors/lambda_function.py +2 -1
  70. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +2 -1
  71. airflow/providers/amazon/aws/sensors/quicksight.py +2 -1
  72. airflow/providers/amazon/aws/sensors/rds.py +2 -1
  73. airflow/providers/amazon/aws/sensors/redshift_cluster.py +2 -1
  74. airflow/providers/amazon/aws/sensors/s3.py +2 -1
  75. airflow/providers/amazon/aws/sensors/sagemaker.py +2 -1
  76. airflow/providers/amazon/aws/sensors/sqs.py +2 -1
  77. airflow/providers/amazon/aws/sensors/step_function.py +2 -1
  78. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +2 -1
  79. airflow/providers/amazon/aws/transfers/base.py +1 -1
  80. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +2 -1
  81. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +2 -1
  82. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +2 -1
  83. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +4 -3
  84. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +2 -1
  85. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +4 -8
  86. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +2 -1
  87. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +2 -1
  88. airflow/providers/amazon/aws/transfers/local_to_s3.py +2 -1
  89. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +2 -1
  90. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +2 -1
  91. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +2 -1
  92. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +2 -1
  93. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +2 -1
  94. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +2 -1
  95. airflow/providers/amazon/aws/transfers/s3_to_sql.py +2 -1
  96. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +2 -1
  97. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +14 -1
  98. airflow/providers/amazon/aws/transfers/sql_to_s3.py +2 -1
  99. airflow/providers/amazon/aws/triggers/base.py +2 -1
  100. airflow/providers/amazon/aws/triggers/dms.py +221 -0
  101. airflow/providers/amazon/aws/triggers/glue.py +2 -1
  102. airflow/providers/amazon/aws/triggers/redshift_cluster.py +2 -1
  103. airflow/providers/amazon/aws/triggers/redshift_data.py +2 -1
  104. airflow/providers/amazon/aws/triggers/s3.py +2 -1
  105. airflow/providers/amazon/aws/triggers/sagemaker.py +2 -1
  106. airflow/providers/amazon/aws/triggers/sqs.py +2 -1
  107. airflow/providers/amazon/aws/utils/__init__.py +1 -15
  108. airflow/providers/amazon/aws/utils/task_log_fetcher.py +2 -1
  109. airflow/providers/amazon/aws/utils/waiter.py +20 -0
  110. airflow/providers/amazon/aws/waiters/dms.json +88 -0
  111. airflow/providers/amazon/get_provider_info.py +9 -4
  112. airflow/providers/amazon/version_compat.py +36 -0
  113. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/METADATA +11 -17
  114. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/RECORD +116 -113
  115. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/WHEEL +0 -0
  116. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/entry_points.txt +0 -0
@@ -19,8 +19,9 @@
 
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
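Nearly all of the small two-line hunks in this release, here and in the hooks, operators, sensors, transfers, and triggers below, apply one mechanical migration: container ABCs such as Sequence, Iterable, Mapping, AsyncIterator, and Generator are now imported from collections.abc instead of typing, whose aliases have been deprecated since Python 3.9 (PEP 585). A minimal sketch of the pattern (the template_fields annotation is just an illustrative use site, not from this file):

    # Before:
    #   from typing import TYPE_CHECKING, Sequence

    # After: same names from their supported home; still subscriptable at
    # runtime on Python 3.9+, so annotations like Sequence[str] keep working.
    from collections.abc import Sequence
    from typing import TYPE_CHECKING

    template_fields: Sequence[str] = ("s3_bucket", "s3_key")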
@@ -17,8 +17,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -20,7 +20,8 @@
 from __future__ import annotations
 
 import os
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from packaging.version import Version
 
@@ -123,9 +124,9 @@ class GCSToS3Operator(BaseOperator):
         self.s3_acl_policy = s3_acl_policy
         self.keep_directory_structure = keep_directory_structure
         try:
-            from airflow.providers.google import __version__
+            from airflow.providers.google import __version__ as _GOOGLE_PROVIDER_VERSION
 
-            if Version(__version__) >= Version("10.3.0"):
+            if Version(Version(_GOOGLE_PROVIDER_VERSION).base_version) >= Version("10.3.0"):
                 self.__is_match_glob_supported = True
             else:
                 self.__is_match_glob_supported = False
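Two things change in this hunk: the imported __version__ is aliased so it no longer shadows a generic name, and the comparison now goes through base_version, which strips pre-release and dev segments. Without that, a pre-release of the google provider (e.g. 10.3.0rc1) would compare as older than 10.3.0 under PEP 440 ordering and silently disable match_glob support. A small sketch of the difference:

    from packaging.version import Version

    v = Version("10.3.0rc1")           # a pre-release of the google provider
    print(v >= Version("10.3.0"))      # False: rc1 sorts before the final release
    print(Version(v.base_version) >= Version("10.3.0"))  # True: base_version is "10.3.0"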
@@ -18,7 +18,8 @@
 from __future__ import annotations
 
 import tempfile
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
@@ -21,7 +21,8 @@ from __future__ import annotations
 
 import json
 import sys
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.models.xcom import MAX_XCOM_SIZE, XCOM_RETURN_KEY
@@ -30,7 +31,6 @@ from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryA
 
 if TYPE_CHECKING:
     from airflow.models import TaskInstance
-    from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic
     from airflow.utils.context import Context
 
 
@@ -174,9 +174,7 @@ class GoogleApiToS3Operator(BaseOperator):
             replace=self.s3_overwrite,
         )
 
-    def _update_google_api_endpoint_params_via_xcom(
-        self, task_instance: TaskInstance | TaskInstancePydantic
-    ) -> None:
+    def _update_google_api_endpoint_params_via_xcom(self, task_instance: TaskInstance) -> None:
         if self.google_api_endpoint_params_via_xcom:
             google_api_endpoint_params = task_instance.xcom_pull(
                 task_ids=self.google_api_endpoint_params_via_xcom_task_ids,
@@ -184,9 +182,7 @@ class GoogleApiToS3Operator(BaseOperator):
             )
             self.google_api_endpoint_params.update(google_api_endpoint_params)
 
-    def _expose_google_api_response_via_xcom(
-        self, task_instance: TaskInstance | TaskInstancePydantic, data: dict
-    ) -> None:
+    def _expose_google_api_response_via_xcom(self, task_instance: TaskInstance, data: dict) -> None:
         if sys.getsizeof(data) < MAX_XCOM_SIZE:
             task_instance.xcom_push(key=self.google_api_response_via_xcom or XCOM_RETURN_KEY, value=data)
         else:
@@ -20,7 +20,8 @@
 from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Callable, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Callable
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
@@ -19,7 +19,8 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -18,7 +18,8 @@
 from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Any, Iterable, Sequence, cast
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Any, cast
 
 from bson import json_util
 
@@ -20,7 +20,8 @@
 from __future__ import annotations
 
 import re
-from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
+from collections.abc import Iterable, Mapping, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Literal, Sequence, TypedDict
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Literal, TypedDict
 
 from botocore.exceptions import ClientError, WaiterError
 
@@ -17,8 +17,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -16,7 +16,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable, Sequence
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -17,8 +17,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 from urllib.parse import urlsplit
 
 from airflow.models import BaseOperator
@@ -16,9 +16,10 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Iterable, Sequence
 from functools import cached_property
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Callable, Iterable, Sequence
+from typing import TYPE_CHECKING, Callable
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
@@ -18,7 +18,8 @@ from __future__ import annotations
 
 import os
 import tempfile
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -17,8 +17,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 from urllib.parse import urlsplit
 
 from airflow.models import BaseOperator
@@ -49,6 +50,8 @@ class SFTPToS3Operator(BaseOperator):
         uploading the file to S3.
     :param use_temp_file: If True, copies file first to local,
         if False streams file from SFTP to S3.
+    :param fail_on_file_not_exist: If True, operator fails when file does not exist,
+        if False, operator will not fail and skips transfer. Default is True.
     """
 
     template_fields: Sequence[str] = ("s3_key", "sftp_path", "s3_bucket")
@@ -62,6 +65,7 @@ class SFTPToS3Operator(BaseOperator):
         sftp_conn_id: str = "ssh_default",
         s3_conn_id: str = "aws_default",
         use_temp_file: bool = True,
+        fail_on_file_not_exist: bool = True,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -71,6 +75,7 @@ class SFTPToS3Operator(BaseOperator):
         self.s3_key = s3_key
         self.s3_conn_id = s3_conn_id
         self.use_temp_file = use_temp_file
+        self.fail_on_file_not_exist = fail_on_file_not_exist
 
     @staticmethod
     def get_s3_key(s3_key: str) -> str:
@@ -85,6 +90,14 @@ class SFTPToS3Operator(BaseOperator):
 
         sftp_client = ssh_hook.get_conn().open_sftp()
 
+        try:
+            sftp_client.stat(self.sftp_path)
+        except FileNotFoundError:
+            if self.fail_on_file_not_exist:
+                raise
+            self.log.info("File %s not found on SFTP server. Skipping transfer.", self.sftp_path)
+            return
+
         if self.use_temp_file:
             with NamedTemporaryFile("w") as f:
                 sftp_client.get(self.sftp_path, f.name)
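The new fail_on_file_not_exist flag makes the missing-file behaviour explicit: the operator now stats the remote path before transferring, and when the flag is False it logs and skips instead of raising. A minimal usage sketch (connection IDs, bucket, and paths are placeholders):

    from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator

    transfer = SFTPToS3Operator(
        task_id="sftp_to_s3",
        sftp_conn_id="ssh_default",
        sftp_path="/remote/reports/daily.csv",  # placeholder remote path
        s3_conn_id="aws_default",
        s3_bucket="my-bucket",                  # placeholder bucket
        s3_key="reports/daily.csv",
        fail_on_file_not_exist=False,  # new in 9.2.0: skip the transfer instead of raising
    )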
@@ -19,8 +19,9 @@ from __future__ import annotations
 
 import enum
 from collections import namedtuple
+from collections.abc import Iterable, Mapping, Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Any, Iterable, Mapping, Sequence, cast
+from typing import TYPE_CHECKING, Any, cast
 
 from typing_extensions import Literal
 
@@ -18,7 +18,8 @@
 from __future__ import annotations
 
 from abc import abstractmethod
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
 
 from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
 from airflow.triggers.base import BaseTrigger, TriggerEvent
@@ -0,0 +1,221 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+from airflow.providers.amazon.aws.hooks.dms import DmsHook
+from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
+
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
+
+class DmsReplicationTerminalStatusTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when an AWS DMS Serverless replication is in a terminal state.
+
+    :param replication_config_arn: The ARN of the replication config.
+    :param waiter_delay: The amount of time in seconds to wait between attempts.
+    :param waiter_max_attempts: The maximum number of attempts to be made.
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        replication_config_arn: str,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
+        aws_conn_id: str | None = "aws_default",
+    ) -> None:
+        super().__init__(
+            serialized_fields={"replication_config_arn": replication_config_arn},
+            waiter_name="replication_terminal_status",
+            waiter_delay=waiter_delay,
+            waiter_args={"Filters": [{"Name": "replication-config-arn", "Values": [replication_config_arn]}]},
+            waiter_max_attempts=waiter_max_attempts,
+            failure_message="Replication failed to reach terminal status.",
+            status_message="Status replication is",
+            status_queries=["Replications[0].Status"],
+            return_key="replication_config_arn",
+            return_value=replication_config_arn,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return DmsHook(
+            self.aws_conn_id,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
+
+
+class DmsReplicationConfigDeletedTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when an AWS DMS Serverless replication config is deleted.
+
+    :param replication_config_arn: The ARN of the replication config.
+    :param waiter_delay: The amount of time in seconds to wait between attempts.
+    :param waiter_max_attempts: The maximum number of attempts to be made.
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        replication_config_arn: str,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
+        aws_conn_id: str | None = "aws_default",
+    ) -> None:
+        super().__init__(
+            serialized_fields={"replication_config_arn": replication_config_arn},
+            waiter_name="replication_config_deleted",
+            waiter_delay=waiter_delay,
+            waiter_args={"Filters": [{"Name": "replication-config-arn", "Values": [replication_config_arn]}]},
+            waiter_max_attempts=waiter_max_attempts,
+            failure_message="Replication config failed to be deleted.",
+            status_message="Status replication config is",
+            status_queries=["ReplicationConfigs[0].Status"],
+            return_key="replication_config_arn",
+            return_value=replication_config_arn,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return DmsHook(
+            self.aws_conn_id,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
+
+
+class DmsReplicationCompleteTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when an AWS DMS Serverless replication completes.
+
+    :param replication_config_arn: The ARN of the replication config.
+    :param waiter_delay: The amount of time in seconds to wait between attempts.
+    :param waiter_max_attempts: The maximum number of attempts to be made.
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        replication_config_arn: str,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
+        aws_conn_id: str | None = "aws_default",
+    ) -> None:
+        super().__init__(
+            serialized_fields={"replication_config_arn": replication_config_arn},
+            waiter_name="replication_complete",
+            waiter_delay=waiter_delay,
+            waiter_args={"Filters": [{"Name": "replication-config-arn", "Values": [replication_config_arn]}]},
+            waiter_max_attempts=waiter_max_attempts,
+            failure_message="Replication failed to complete.",
+            status_message="Status replication is",
+            status_queries=["Replications[0].Status"],
+            return_key="replication_config_arn",
+            return_value=replication_config_arn,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return DmsHook(
+            self.aws_conn_id,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
+
+
+class DmsReplicationStoppedTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when an AWS DMS Serverless replication is stopped.
+
+    :param replication_config_arn: The ARN of the replication config.
+    :param waiter_delay: The amount of time in seconds to wait between attempts.
+    :param waiter_max_attempts: The maximum number of attempts to be made.
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        replication_config_arn: str,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
+        aws_conn_id: str | None = "aws_default",
+    ) -> None:
+        super().__init__(
+            serialized_fields={"replication_config_arn": replication_config_arn},
+            waiter_name="replication_stopped",
+            waiter_delay=waiter_delay,
+            waiter_args={"Filters": [{"Name": "replication-config-arn", "Values": [replication_config_arn]}]},
+            waiter_max_attempts=waiter_max_attempts,
+            failure_message="Replication failed to stop.",
+            status_message="Status replication is",
+            status_queries=["Replications[0].Status"],
+            return_key="replication_config_arn",
+            return_value=replication_config_arn,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return DmsHook(
+            self.aws_conn_id,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
+
+
+class DmsReplicationDeprovisionedTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when an AWS DMS Serverless replication is de-provisioned.
+
+    :param replication_config_arn: The ARN of the replication config.
+    :param waiter_delay: The amount of time in seconds to wait between attempts.
+    :param waiter_max_attempts: The maximum number of attempts to be made.
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        replication_config_arn: str,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
+        aws_conn_id: str | None = "aws_default",
+    ) -> None:
+        super().__init__(
+            serialized_fields={"replication_config_arn": replication_config_arn},
+            waiter_name="replication_deprovisioned",
+            waiter_delay=waiter_delay,
+            waiter_args={"Filters": [{"Name": "replication-config-arn", "Values": [replication_config_arn]}]},
+            waiter_max_attempts=waiter_max_attempts,
+            failure_message="Replication failed to deprovision.",
+            status_message="Status replication is",
+            status_queries=["Replications[0].ProvisionData.ProvisionState"],
+            return_key="replication_config_arn",
+            return_value=replication_config_arn,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return DmsHook(
+            self.aws_conn_id,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
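All five triggers are thin parameterizations of AwsBaseWaiterTrigger over the new custom waiters in waiters/dms.json (+88 in this diff). A sketch of how such a trigger is consumed through Airflow's deferral contract; the operator below is hypothetical, though the new DMS serverless operators in operators/dms.py presumably wire these up the same way:

    from airflow.models import BaseOperator
    from airflow.providers.amazon.aws.triggers.dms import DmsReplicationCompleteTrigger


    class WaitForDmsReplication(BaseOperator):  # hypothetical illustrative operator
        def __init__(self, replication_config_arn: str, **kwargs):
            super().__init__(**kwargs)
            self.replication_config_arn = replication_config_arn

        def execute(self, context):
            # Hand the wait off to the triggerer instead of blocking a worker slot.
            self.defer(
                trigger=DmsReplicationCompleteTrigger(
                    replication_config_arn=self.replication_config_arn,
                    waiter_delay=60,
                    waiter_max_attempts=120,
                ),
                method_name="execute_complete",
            )

        def execute_complete(self, context, event=None):
            # On success the trigger emits the config ARN under its return_key.
            return event["replication_config_arn"]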
@@ -18,8 +18,9 @@
 from __future__ import annotations
 
 import asyncio
+from collections.abc import AsyncIterator
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from typing import TYPE_CHECKING, Any
 
 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
@@ -17,7 +17,8 @@
 from __future__ import annotations
 
 import asyncio
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
 
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
@@ -18,8 +18,9 @@
 from __future__ import annotations
 
 import asyncio
+from collections.abc import AsyncIterator
 from functools import cached_property
-from typing import Any, AsyncIterator
+from typing import Any
 
 from airflow.providers.amazon.aws.hooks.redshift_data import (
     ABORTED_STATE,
@@ -17,8 +17,9 @@
 from __future__ import annotations
 
 import asyncio
+from collections.abc import AsyncIterator
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from typing import TYPE_CHECKING, Any
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
@@ -19,9 +19,10 @@ from __future__ import annotations
 
 import asyncio
 from collections import Counter
+from collections.abc import AsyncIterator
 from enum import IntEnum
 from functools import cached_property
-from typing import Any, AsyncIterator
+from typing import Any
 
 from botocore.exceptions import WaiterError
 
@@ -17,7 +17,8 @@
 from __future__ import annotations
 
 import asyncio
-from typing import TYPE_CHECKING, Any, AsyncIterator, Collection
+from collections.abc import AsyncIterator, Collection
+from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
@@ -31,21 +31,7 @@ log = logging.getLogger(__name__)
 
 
 def trim_none_values(obj: dict):
-    from packaging.version import Version
-
-    from airflow.version import version
-
-    if Version(version) < Version("2.7"):
-        # before version 2.7, the behavior is not the same.
-        # Empty dict and lists are removed from the given dict.
-        return {key: val for key, val in obj.items() if val is not None}
-    else:
-        # once airflow 2.6 rolls out of compatibility support for provider packages,
-        # we can replace usages of this method with the core one in our code,
-        # and uncomment this warning for users who may use it.
-        # warnings.warn("use airflow.utils.helpers.prune_dict() instead",
-        #     AirflowProviderDeprecationWarning, stacklevel=2)
-        return prune_dict(obj)
+    return prune_dict(obj)
 
 
 def datetime_to_epoch(date_time: datetime) -> int:
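With the Airflow 2.6 compatibility branch removed, trim_none_values is now a plain wrapper around airflow.utils.helpers.prune_dict. Note the behavioural nuance the old branch called out: the pre-2.7 fallback only dropped None entries, while prune_dict (in its default strict mode) also drops nested containers that prune down to empty. A small sketch of the difference:

    from airflow.utils.helpers import prune_dict

    payload = {"a": None, "b": 0, "c": {}, "d": [1, None]}
    # The old fallback returned {'b': 0, 'c': {}, 'd': [1, None]};
    # prune_dict also prunes inside containers and drops the now-empty dict.
    print(prune_dict(payload))  # {'b': 0, 'd': [1]}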
@@ -18,9 +18,10 @@
 from __future__ import annotations
 
 import time
+from collections.abc import Generator
 from datetime import datetime, timedelta, timezone
 from threading import Event, Thread
-from typing import TYPE_CHECKING, Generator
+from typing import TYPE_CHECKING
 
 from botocore.exceptions import ClientError, ConnectionClosedError
 
@@ -19,6 +19,7 @@ from __future__ import annotations
 
 import logging
 import time
+from enum import Enum
 from typing import Callable
 
 from airflow.exceptions import AirflowException
@@ -83,3 +84,22 @@ def get_state(response, keys) -> str:
         if value is not None:
             value = value.get(key, None)
     return value
+
+
+class WaitPolicy(str, Enum):
+    """
+    Used to control the waiting behaviour within EmrCreateJobFlowOperator.
+
+    Choices:
+    - WAIT_FOR_COMPLETION - Will wait for the cluster to report "Running" state
+    - WAIT_FOR_STEPS_COMPLETION - Will wait for the cluster to report "Terminated" state
+    """
+
+    WAIT_FOR_COMPLETION = "wait_for_completion"
+    WAIT_FOR_STEPS_COMPLETION = "wait_for_steps_completion"
+
+
+WAITER_POLICY_NAME_MAPPING: dict[WaitPolicy, str] = {
+    WaitPolicy.WAIT_FOR_COMPLETION: "job_flow_waiting",
+    WaitPolicy.WAIT_FOR_STEPS_COMPLETION: "job_flow_terminated",
+}
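The enum values map onto the two new EMR waiter names. Judging by the emr.py changes in this diff (+31/-8), this enum backs a wait_policy parameter on EmrCreateJobFlowOperator; a minimal usage sketch under that assumption (JOB_FLOW_OVERRIDES is a placeholder cluster spec):

    from airflow.providers.amazon.aws.operators.emr import EmrCreateJobFlowOperator
    from airflow.providers.amazon.aws.utils.waiter import WaitPolicy

    create_cluster = EmrCreateJobFlowOperator(
        task_id="create_job_flow",
        job_flow_overrides=JOB_FLOW_OVERRIDES,  # placeholder cluster configuration dict
        # Block until the cluster reports "Terminated", i.e. all steps have finished:
        wait_policy=WaitPolicy.WAIT_FOR_STEPS_COMPLETION,
    )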