apache-airflow-providers-amazon 9.15.0__py3-none-any.whl → 9.18.1rc1__py3-none-any.whl
This diff shows the differences between two publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +3 -3
- airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -1
- airflow/providers/amazon/aws/auth_manager/routes/login.py +7 -1
- airflow/providers/amazon/aws/bundles/s3.py +1 -1
- airflow/providers/amazon/aws/exceptions.py +1 -1
- airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py +5 -1
- airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +1 -2
- airflow/providers/amazon/aws/executors/batch/batch_executor.py +1 -2
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -2
- airflow/providers/amazon/aws/hooks/athena.py +7 -3
- airflow/providers/amazon/aws/hooks/athena_sql.py +3 -3
- airflow/providers/amazon/aws/hooks/base_aws.py +3 -7
- airflow/providers/amazon/aws/hooks/batch_client.py +5 -7
- airflow/providers/amazon/aws/hooks/batch_waiters.py +1 -2
- airflow/providers/amazon/aws/hooks/chime.py +2 -2
- airflow/providers/amazon/aws/hooks/comprehend.py +1 -1
- airflow/providers/amazon/aws/hooks/datasync.py +3 -3
- airflow/providers/amazon/aws/hooks/dynamodb.py +1 -1
- airflow/providers/amazon/aws/hooks/ec2.py +1 -1
- airflow/providers/amazon/aws/hooks/elasticache_replication_group.py +1 -1
- airflow/providers/amazon/aws/hooks/emr.py +1 -1
- airflow/providers/amazon/aws/hooks/firehose.py +56 -0
- airflow/providers/amazon/aws/hooks/glue.py +9 -2
- airflow/providers/amazon/aws/hooks/glue_catalog.py +1 -1
- airflow/providers/amazon/aws/hooks/kinesis.py +31 -13
- airflow/providers/amazon/aws/hooks/logs.py +10 -2
- airflow/providers/amazon/aws/hooks/mwaa.py +38 -7
- airflow/providers/amazon/aws/hooks/quicksight.py +1 -1
- airflow/providers/amazon/aws/hooks/rds.py +1 -1
- airflow/providers/amazon/aws/hooks/redshift_sql.py +31 -8
- airflow/providers/amazon/aws/hooks/s3.py +14 -6
- airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
- airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +1 -2
- airflow/providers/amazon/aws/hooks/ssm.py +34 -6
- airflow/providers/amazon/aws/hooks/step_function.py +1 -1
- airflow/providers/amazon/aws/links/base_aws.py +1 -1
- airflow/providers/amazon/aws/links/emr.py +1 -1
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +50 -20
- airflow/providers/amazon/aws/operators/appflow.py +1 -1
- airflow/providers/amazon/aws/operators/athena.py +1 -1
- airflow/providers/amazon/aws/operators/base_aws.py +2 -2
- airflow/providers/amazon/aws/operators/batch.py +1 -1
- airflow/providers/amazon/aws/operators/bedrock.py +3 -1
- airflow/providers/amazon/aws/operators/cloud_formation.py +2 -2
- airflow/providers/amazon/aws/operators/comprehend.py +1 -1
- airflow/providers/amazon/aws/operators/datasync.py +1 -1
- airflow/providers/amazon/aws/operators/dms.py +1 -1
- airflow/providers/amazon/aws/operators/ec2.py +1 -1
- airflow/providers/amazon/aws/operators/ecs.py +1 -1
- airflow/providers/amazon/aws/operators/eks.py +2 -1
- airflow/providers/amazon/aws/operators/emr.py +22 -12
- airflow/providers/amazon/aws/operators/eventbridge.py +1 -1
- airflow/providers/amazon/aws/operators/glue.py +1 -1
- airflow/providers/amazon/aws/operators/glue_crawler.py +1 -1
- airflow/providers/amazon/aws/operators/glue_databrew.py +1 -1
- airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -1
- airflow/providers/amazon/aws/operators/lambda_function.py +1 -1
- airflow/providers/amazon/aws/operators/mwaa.py +13 -4
- airflow/providers/amazon/aws/operators/neptune.py +1 -1
- airflow/providers/amazon/aws/operators/rds.py +1 -1
- airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -1
- airflow/providers/amazon/aws/operators/redshift_data.py +1 -1
- airflow/providers/amazon/aws/operators/s3.py +1 -1
- airflow/providers/amazon/aws/operators/sagemaker.py +1 -1
- airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -2
- airflow/providers/amazon/aws/operators/ssm.py +122 -17
- airflow/providers/amazon/aws/operators/step_function.py +1 -1
- airflow/providers/amazon/aws/secrets/secrets_manager.py +3 -4
- airflow/providers/amazon/aws/sensors/athena.py +1 -1
- airflow/providers/amazon/aws/sensors/base_aws.py +2 -2
- airflow/providers/amazon/aws/sensors/batch.py +1 -1
- airflow/providers/amazon/aws/sensors/bedrock.py +1 -1
- airflow/providers/amazon/aws/sensors/comprehend.py +1 -1
- airflow/providers/amazon/aws/sensors/dms.py +1 -1
- airflow/providers/amazon/aws/sensors/ec2.py +1 -1
- airflow/providers/amazon/aws/sensors/ecs.py +1 -1
- airflow/providers/amazon/aws/sensors/eks.py +2 -1
- airflow/providers/amazon/aws/sensors/emr.py +1 -3
- airflow/providers/amazon/aws/sensors/glacier.py +1 -1
- airflow/providers/amazon/aws/sensors/glue.py +1 -1
- airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -1
- airflow/providers/amazon/aws/sensors/glue_crawler.py +1 -1
- airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -1
- airflow/providers/amazon/aws/sensors/lambda_function.py +1 -1
- airflow/providers/amazon/aws/sensors/mwaa.py +15 -2
- airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -1
- airflow/providers/amazon/aws/sensors/quicksight.py +1 -1
- airflow/providers/amazon/aws/sensors/rds.py +1 -1
- airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -1
- airflow/providers/amazon/aws/sensors/s3.py +3 -3
- airflow/providers/amazon/aws/sensors/sagemaker.py +1 -1
- airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -2
- airflow/providers/amazon/aws/sensors/sqs.py +1 -1
- airflow/providers/amazon/aws/sensors/ssm.py +33 -17
- airflow/providers/amazon/aws/sensors/step_function.py +1 -1
- airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +3 -3
- airflow/providers/amazon/aws/transfers/base.py +5 -5
- airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +4 -4
- airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/gcs_to_s3.py +48 -6
- airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
- airflow/providers/amazon/aws/transfers/google_api_to_s3.py +2 -5
- airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -1
- airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py +6 -7
- airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -2
- airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
- airflow/providers/amazon/aws/transfers/s3_to_redshift.py +6 -7
- airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
- airflow/providers/amazon/aws/transfers/s3_to_sql.py +1 -2
- airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/sql_to_s3.py +8 -9
- airflow/providers/amazon/aws/triggers/bedrock.py +1 -1
- airflow/providers/amazon/aws/triggers/ecs.py +1 -1
- airflow/providers/amazon/aws/triggers/eks.py +1 -1
- airflow/providers/amazon/aws/triggers/s3.py +29 -2
- airflow/providers/amazon/aws/triggers/sagemaker.py +1 -1
- airflow/providers/amazon/aws/triggers/sqs.py +1 -1
- airflow/providers/amazon/aws/triggers/ssm.py +17 -1
- airflow/providers/amazon/aws/utils/__init__.py +1 -1
- airflow/providers/amazon/aws/utils/connection_wrapper.py +3 -6
- airflow/providers/amazon/aws/utils/mixins.py +1 -1
- airflow/providers/amazon/aws/utils/waiter.py +3 -3
- airflow/providers/amazon/aws/utils/waiter_with_logging.py +1 -1
- airflow/providers/amazon/aws/waiters/emr.json +6 -6
- airflow/providers/amazon/get_provider_info.py +19 -1
- airflow/providers/amazon/version_compat.py +19 -16
- {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/METADATA +29 -19
- {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/RECORD +138 -136
- apache_airflow_providers_amazon-9.18.1rc1.dist-info/licenses/NOTICE +5 -0
- {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/amazon → apache_airflow_providers_amazon-9.18.1rc1.dist-info/licenses}/LICENSE +0 -0

airflow/providers/amazon/aws/hooks/mwaa.py:

```diff
@@ -18,6 +18,9 @@

 from __future__ import annotations

+import warnings
+from typing import Literal
+
 import requests
 from botocore.exceptions import ClientError

@@ -55,6 +58,7 @@ class MwaaHook(AwsBaseHook):
         body: dict | None = None,
         query_params: dict | None = None,
         generate_local_token: bool = False,
+        airflow_version: Literal[2, 3] | None = None,
     ) -> dict:
         """
         Invoke the REST API on the Airflow webserver with the specified inputs.
@@ -70,6 +74,8 @@ class MwaaHook(AwsBaseHook):
         :param generate_local_token: If True, only the local web token method is used without trying boto's
             `invoke_rest_api` first. If False, the local web token method is used as a fallback after trying
             boto's `invoke_rest_api`
+        :param airflow_version: The Airflow major version the MWAA environment runs.
+            This parameter is only used if the local web token method is used to call Airflow API.
         """
         # Filter out keys with None values because Airflow REST API doesn't accept requests otherwise
         body = {k: v for k, v in body.items() if v is not None} if body else {}
@@ -83,7 +89,7 @@ class MwaaHook(AwsBaseHook):
         }

         if generate_local_token:
-            return self._invoke_rest_api_using_local_session_token(**api_kwargs)
+            return self._invoke_rest_api_using_local_session_token(airflow_version, **api_kwargs)

         try:
             response = self.conn.invoke_rest_api(**api_kwargs)
@@ -100,7 +106,7 @@ class MwaaHook(AwsBaseHook):
                 self.log.info(
                     "Access Denied due to missing airflow:InvokeRestApi in IAM policy. Trying again by generating local token..."
                 )
-                return self._invoke_rest_api_using_local_session_token(**api_kwargs)
+                return self._invoke_rest_api_using_local_session_token(airflow_version, **api_kwargs)
             to_log = e.response
             # ResponseMetadata is removed because it contains data that is either very unlikely to be
             # useful in XComs and logs, or redundant given the data already included in the response
@@ -110,14 +116,35 @@ class MwaaHook(AwsBaseHook):

     def _invoke_rest_api_using_local_session_token(
         self,
+        airflow_version: Literal[2, 3] | None = None,
         **api_kwargs,
     ) -> dict:
+        if not airflow_version:
+            warnings.warn(
+                "The parameter ``airflow_version`` in ``MwaaHook.invoke_rest_api`` is not "
+                "specified and the local web token method is being used. "
+                "The default Airflow version being used is 2 but this value will change in the future. "
+                "To avoid any unexpected behavior, please explicitly specify the Airflow version.",
+                FutureWarning,
+                stacklevel=3,
+            )
+            airflow_version = 2
+
         try:
-            session, hostname = self._get_session_conn(api_kwargs["Name"])
+            session, hostname, login_response = self._get_session_conn(api_kwargs["Name"], airflow_version)
+
+            headers = {}
+            if airflow_version == 3:
+                headers = {
+                    "Authorization": f"Bearer {login_response.cookies['_token']}",
+                    "Content-Type": "application/json",
+                }

+            api_version = "v1" if airflow_version == 2 else "v2"
             response = session.request(
                 method=api_kwargs["Method"],
-                url=f"https://{hostname}/api/v1{api_kwargs['Path']}",
+                url=f"https://{hostname}/api/{api_version}{api_kwargs['Path']}",
+                headers=headers,
                 params=api_kwargs["QueryParameters"],
                 json=api_kwargs["Body"],
                 timeout=10,
@@ -134,15 +161,19 @@ class MwaaHook(AwsBaseHook):
         }

     # Based on: https://docs.aws.amazon.com/mwaa/latest/userguide/access-mwaa-apache-airflow-rest-api.html#create-web-server-session-token
-    def _get_session_conn(self, env_name: str) -> tuple:
+    def _get_session_conn(self, env_name: str, airflow_version: Literal[2, 3]) -> tuple:
         create_token_response = self.conn.create_web_login_token(Name=env_name)
         web_server_hostname = create_token_response["WebServerHostname"]
         web_token = create_token_response["WebToken"]

-        login_url = f"https://{web_server_hostname}/aws_mwaa/login"
+        login_url = (
+            f"https://{web_server_hostname}/aws_mwaa/login"
+            if airflow_version == 2
+            else f"https://{web_server_hostname}/pluginsv2/aws_mwaa/login"
+        )
         login_payload = {"token": web_token}
         session = requests.Session()
         login_response = session.post(login_url, data=login_payload, timeout=10)
         login_response.raise_for_status()

-        return session, web_server_hostname
+        return session, web_server_hostname, login_response
```
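
Taken together, these hunks teach the local-web-token fallback about Airflow 3 environments: version 2 keeps the session-cookie flow against `/api/v1`, while version 3 logs in via `/pluginsv2/aws_mwaa/login` and replays the `_token` cookie as a Bearer header against `/api/v2`. A minimal usage sketch, assuming the hook's existing leading parameters (`env_name`, `path`, `method`) and a placeholder environment name:

```python
from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook

hook = MwaaHook(aws_conn_id="aws_default")

# Pinning airflow_version avoids the FutureWarning and makes the hook pick
# the right login URL and REST API version (v1 for Airflow 2, v2 for Airflow 3).
response = hook.invoke_rest_api(
    env_name="my-mwaa-environment",  # placeholder environment name
    path="/dags",                    # appended to /api/v1 or /api/v2
    method="GET",
    generate_local_token=True,       # skip boto's invoke_rest_api entirely
    airflow_version=3,
)
```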

airflow/providers/amazon/aws/hooks/quicksight.py:

```diff
@@ -21,8 +21,8 @@ import time

 from botocore.exceptions import ClientError

-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.common.compat.sdk import AirflowException


 class QuickSightHook(AwsBaseHook):
```
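
This is the first of many identical hunks in this release: `AirflowException` and related classes now come from `airflow.providers.common.compat.sdk`, a shim module in the `common.compat` provider, instead of `airflow.exceptions`, so the same code imports cleanly on both Airflow 2 and Airflow 3. A simplified sketch of what such a shim looks like; the import path inside the `try` branch is illustrative, not the shim's actual internals:

```python
# Illustrative compat shim; NOT the real source of
# airflow.providers.common.compat.sdk, which covers many more names.
try:
    # Airflow 3: prefer the Task SDK location (hypothetical path, for illustration).
    from airflow.sdk.exceptions import AirflowException
except ImportError:
    # Airflow 2: fall back to the legacy core location.
    from airflow.exceptions import AirflowException

__all__ = ["AirflowException"]
```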

airflow/providers/amazon/aws/hooks/rds.py:

```diff
@@ -23,9 +23,9 @@ import time
 from collections.abc import Callable
 from typing import TYPE_CHECKING

-from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
+from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException

 if TYPE_CHECKING:
     from mypy_boto3_rds import RDSClient  # noqa: F401
```

airflow/providers/amazon/aws/hooks/redshift_sql.py:

```diff
@@ -20,12 +20,13 @@ from functools import cached_property
 from typing import TYPE_CHECKING

 import redshift_connector
-
+import tenacity
+from redshift_connector import Connection as RedshiftConnection, InterfaceError, OperationalError
 from sqlalchemy import create_engine
 from sqlalchemy.engine.url import URL

-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.common.sql.hooks.sql import DbApiHook

 if TYPE_CHECKING:
@@ -51,7 +52,7 @@ class RedshiftSQLHook(DbApiHook):
         :ref:`Amazon Redshift connection id<howto/connection:redshift>`

     .. note::
-        get_sqlalchemy_engine() and get_uri() depend on sqlalchemy-amazon-redshift
+        get_sqlalchemy_engine() and get_uri() depend on sqlalchemy-amazon-redshift.
     """

     conn_name_attr = "redshift_conn_id"
```
```diff
@@ -155,10 +156,21 @@ class RedshiftSQLHook(DbApiHook):
         if "user" in conn_params:
             conn_params["username"] = conn_params.pop("user")

-        # Compatibility: The 'create' factory method was added in SQLAlchemy-1.4
-        # to replace calling the default URL constructor directly.
-        create_url = getattr(URL, "create", URL)
-        return str(create_url(drivername="postgresql", **conn_params))
+        # Use URL.create for SQLAlchemy 2 compatibility
+        username = conn_params.get("username")
+        password = conn_params.get("password")
+        host = conn_params.get("host")
+        port = conn_params.get("port")
+        database = conn_params.get("database")
+
+        return URL.create(
+            drivername="postgresql",
+            username=str(username) if username is not None else None,
+            password=str(password) if password is not None else None,
+            host=str(host) if host is not None else None,
+            port=int(port) if port is not None else None,
+            database=str(database) if database is not None else None,
+        ).render_as_string(hide_password=False)

     def get_sqlalchemy_engine(self, engine_kwargs=None):
         """Overridden to pass Redshift-specific arguments."""
```
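
The `render_as_string(hide_password=False)` call matters because SQLAlchemy 1.4+ masks the password when a `URL` object is rendered with its defaults; building the URL explicitly keeps `get_uri()` returning a usable DSN. A small illustration with placeholder credentials:

```python
from sqlalchemy.engine.url import URL

url = URL.create(
    drivername="postgresql",
    username="awsuser",
    password="secret",  # placeholder credentials
    host="examplecluster.abc123.us-east-1.redshift.amazonaws.com",
    port=5439,
    database="dev",
)

print(repr(url))                                  # password rendered masked as ***
print(url.render_as_string(hide_password=False))  # full DSN including the real password
```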
```diff
@@ -195,6 +207,14 @@ class RedshiftSQLHook(DbApiHook):
         pk_columns = [row[0] for row in self.get_records(sql, (schema, table))]
         return pk_columns or None

+    @tenacity.retry(
+        stop=tenacity.stop_after_attempt(5),
+        wait=tenacity.wait_exponential(max=20),
+        # OperationalError is thrown when the connection times out
+        # InterfaceError is thrown when the connection is refused
+        retry=tenacity.retry_if_exception_type((OperationalError, InterfaceError)),
+        reraise=True,
+    )
     def get_conn(self) -> RedshiftConnection:
         """Get a ``redshift_connector.Connection`` object."""
         conn_params = self._get_conn_params()
```
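
The decorator makes transient connection failures self-healing: up to five attempts with exponential backoff capped at 20 seconds, retrying only on the two exception types named above, and re-raising the final error instead of wrapping it in `RetryError`. The same pattern in isolation, as a standalone sketch:

```python
import redshift_connector
import tenacity
from redshift_connector import InterfaceError, OperationalError


@tenacity.retry(
    stop=tenacity.stop_after_attempt(5),     # at most 5 attempts
    wait=tenacity.wait_exponential(max=20),  # 1s, 2s, 4s, ... capped at 20s
    retry=tenacity.retry_if_exception_type((OperationalError, InterfaceError)),
    reraise=True,                            # surface the original exception
)
def connect_with_retry(**conn_params) -> redshift_connector.Connection:
    # Each failed attempt re-enters here until the stop condition is hit.
    return redshift_connector.connect(**conn_params)
```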
```diff
@@ -237,7 +257,10 @@ class RedshiftSQLHook(DbApiHook):
             region_name = AwsBaseHook(aws_conn_id=self.aws_conn_id).region_name
             identifier = f"{cluster_identifier}.{region_name}"
         if not cluster_identifier:
-            identifier = self._get_identifier_from_hostname(connection.host)
+            if connection.host:
+                identifier = self._get_identifier_from_hostname(connection.host)
+            else:
+                raise AirflowException("Host is required when cluster_identifier is not provided.")
         return f"{identifier}:{port}"

     def _get_identifier_from_hostname(self, hostname: str) -> str:
```

airflow/providers/amazon/aws/hooks/s3.py:

```diff
@@ -43,26 +43,25 @@ from urllib.parse import urlsplit
 from uuid import uuid4

 if TYPE_CHECKING:
+    from aiobotocore.client import AioBaseClient
     from mypy_boto3_s3.service_resource import (
         Bucket as S3Bucket,
         Object as S3ResourceObject,
     )

-    from airflow.utils.types import ArgNotSet
-
-    with suppress(ImportError):
-        from aiobotocore.client import AioBaseClient
+    from airflow.providers.amazon.version_compat import ArgNotSet


 from asgiref.sync import sync_to_async
 from boto3.s3.transfer import S3Transfer, TransferConfig
 from botocore.exceptions import ClientError

-from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.exceptions import S3HookUriParseFailure
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.utils.tags import format_tags
 from airflow.providers.common.compat.lineage.hook import get_hook_lineage_collector
+from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException
 from airflow.utils.helpers import chunks

 logger = logging.getLogger(__name__)
@@ -945,7 +944,14 @@ class S3Hook(AwsBaseHook):
             stacklevel=2,
         )

-        return list(self.iter_file_metadata(prefix, bucket_name, page_size, max_items))
+        return list(
+            self.iter_file_metadata(
+                prefix=prefix,
+                bucket_name=bucket_name,
+                page_size=page_size,
+                max_items=max_items,
+            )
+        )

     @provide_bucket_name
     def iter_file_metadata(
@@ -1774,6 +1780,8 @@ class S3Hook(AwsBaseHook):
         local_s3_objects = []
         s3_bucket = self.get_bucket(bucket_name)
         for obj in s3_bucket.objects.filter(Prefix=s3_prefix):
+            if obj.key.endswith("/"):
+                continue
             obj_path = Path(obj.key)
             local_target_path = local_dir.joinpath(obj_path.relative_to(s3_prefix))
             if not local_target_path.parent.exists():
```
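
The new guard in the download loop skips zero-byte "directory marker" keys (keys ending in `/`) that the S3 console and some tools create; without it, the loop would try to write a file where a directory belongs. A standalone sketch of the same filter, assuming a boto3 session with credentials and a placeholder bucket:

```python
import boto3

s3 = boto3.resource("s3")
bucket = s3.Bucket("my-bucket")  # placeholder bucket name

for obj in bucket.objects.filter(Prefix="data/"):
    if obj.key.endswith("/"):
        # Zero-byte folder marker: nothing to download.
        continue
    print("would download", obj.key)
```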

airflow/providers/amazon/aws/hooks/sagemaker.py:

```diff
@@ -31,11 +31,11 @@ from typing import Any, cast
 from asgiref.sync import sync_to_async
 from botocore.exceptions import ClientError

-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.tags import format_tags
+from airflow.providers.common.compat.sdk import AirflowException

 try:
     from airflow.sdk import timezone
```

airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py:

```diff
@@ -24,9 +24,8 @@ import time
 from sagemaker_studio import ClientConfig
 from sagemaker_studio.sagemaker_studio_api import SageMakerStudioAPI

-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook


 class SageMakerNotebookHook(BaseHook):
```

airflow/providers/amazon/aws/hooks/ssm.py:

```diff
@@ -20,7 +20,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from airflow.utils.types import NOTSET, ArgNotSet
+from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet, is_arg_set

 if TYPE_CHECKING:
     from airflow.sdk.execution_time.secrets_masker import mask_secret
@@ -38,7 +38,8 @@ class SsmHook(AwsBaseHook):
     """
     Interact with Amazon Systems Manager (SSM).

-    Provide thin wrapper around :external+boto3:py:class:`boto3.client("ssm") <SSM.Client>`.
+    Provide thin wrapper around
+    :external+boto3:py:class:`boto3.client("ssm") <SSM.Client>`.

     Additional arguments (such as ``aws_conn_id``) may be specified and
     are passed down to the underlying AwsBaseHook.
@@ -53,7 +54,9 @@ class SsmHook(AwsBaseHook):

     def get_parameter_value(self, parameter: str, default: str | ArgNotSet = NOTSET) -> str:
         """
-        Return the provided Parameter or an optional default
+        Return the provided Parameter or an optional default.
+
+        If it is encrypted, then decrypt and mask.

         .. seealso::
             - :external+boto3:py:meth:`SSM.Client.get_parameter`
@@ -68,6 +71,31 @@ class SsmHook(AwsBaseHook):
                 mask_secret(value)
             return value
         except self.conn.exceptions.ParameterNotFound:
-            if isinstance(default, ArgNotSet):
-                raise
-            return default
+            if is_arg_set(default):
+                return default
+            raise
+
+    def get_command_invocation(self, command_id: str, instance_id: str) -> dict:
+        """
+        Get the output of a command invocation for a specific instance.
+
+        .. seealso::
+            - :external+boto3:py:meth:`SSM.Client.get_command_invocation`
+
+        :param command_id: The ID of the command.
+        :param instance_id: The ID of the instance.
+        :return: The command invocation details including output.
+        """
+        return self.conn.get_command_invocation(CommandId=command_id, InstanceId=instance_id)
+
+    def list_command_invocations(self, command_id: str) -> dict:
+        """
+        List all command invocations for a given command ID.
+
+        .. seealso::
+            - :external+boto3:py:meth:`SSM.Client.list_command_invocations`
+
+        :param command_id: The ID of the command.
+        :return: Response from SSM list_command_invocations API.
+        """
+        return self.conn.list_command_invocations(CommandId=command_id)
```
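
The two new helpers are thin pass-throughs to the boto3 SSM client, which makes polling a previously sent command straightforward. A minimal sketch, assuming a command was already issued with `SSM.Client.send_command` and using placeholder IDs:

```python
from airflow.providers.amazon.aws.hooks.ssm import SsmHook

hook = SsmHook(aws_conn_id="aws_default")
command_id = "11111111-2222-3333-4444-555555555555"  # placeholder command ID

# One call per (command, instance) pair returns status plus stdout/stderr.
invocation = hook.get_command_invocation(
    command_id=command_id,
    instance_id="i-0123456789abcdef0",  # placeholder instance ID
)
print(invocation["Status"], invocation.get("StandardOutputContent", ""))

# Or enumerate every instance the command targeted.
for inv in hook.list_command_invocations(command_id)["CommandInvocations"]:
    print(inv["InstanceId"], inv["Status"])
```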

airflow/providers/amazon/aws/hooks/step_function.py:

```diff
@@ -18,8 +18,8 @@ from __future__ import annotations

 import json

-from airflow.exceptions import AirflowFailException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.common.compat.sdk import AirflowFailException


 class StepFunctionHook(AwsBaseHook):
```

airflow/providers/amazon/aws/links/base_aws.py:

```diff
@@ -20,7 +20,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, ClassVar

 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.amazon.version_compat import BaseOperatorLink, XCom
+from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom

 if TYPE_CHECKING:
     from airflow.models import BaseOperator
```

airflow/providers/amazon/aws/links/emr.py:

```diff
@@ -19,10 +19,10 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any
 from urllib.parse import ParseResult, quote_plus, urlparse

-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.emr import EmrServerlessHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.links.base_aws import BASE_AWS_CONSOLE_LINK, BaseAwsLink
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.helpers import exactly_one

 if TYPE_CHECKING:
```

airflow/providers/amazon/aws/log/cloudwatch_task_handler.py:

```diff
@@ -22,6 +22,7 @@ import copy
 import json
 import logging
 import os
+from collections.abc import Generator
 from datetime import date, datetime, timedelta, timezone
 from functools import cached_property
 from pathlib import Path
@@ -40,8 +41,15 @@ if TYPE_CHECKING:
     import structlog.typing

     from airflow.models.taskinstance import TaskInstance
+    from airflow.providers.amazon.aws.hooks.logs import CloudWatchLogEvent
     from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
-    from airflow.utils.log.file_task_handler import LogMessages, LogSourceInfo
+    from airflow.utils.log.file_task_handler import (
+        LogMessages,
+        LogResponse,
+        LogSourceInfo,
+        RawLogStream,
+        StreamingLogResponse,
+    )


 def json_serialize_legacy(value: Any) -> str | None:
```
```diff
@@ -163,20 +171,31 @@ class CloudWatchRemoteLogIO(LoggingMixin):  # noqa: D101
             self.close()
             return

-    def read(self, relative_path, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages | None]:
-        logs: LogMessages | None = []
+    def read(self, relative_path: str, ti: RuntimeTI) -> LogResponse:
+        messages, logs = self.stream(relative_path, ti)
+        str_logs: list[str] = [f"{msg}\n" for group in logs for msg in group]
+
+        return messages, str_logs
+
+    def stream(self, relative_path: str, ti: RuntimeTI) -> StreamingLogResponse:
+        logs: list[RawLogStream] = []
         messages = [
             f"Reading remote log from Cloudwatch log_group: {self.log_group} log_stream: {relative_path}"
         ]
         try:
-            logs = [self.get_cloudwatch_logs(relative_path, ti)]
+            gen: RawLogStream = (
+                self._parse_log_event_as_dumped_json(event)
+                for event in self.get_cloudwatch_logs(relative_path, ti)
+            )
+            logs = [gen]
         except Exception as e:
-            logs = None
             messages.append(str(e))

         return messages, logs

-    def get_cloudwatch_logs(self, stream_name: str, task_instance: RuntimeTI) -> str:
+    def get_cloudwatch_logs(
+        self, stream_name: str, task_instance: RuntimeTI
+    ) -> Generator[CloudWatchLogEvent, None, None]:
         """
         Return all logs from the given log stream.

@@ -192,29 +211,22 @@ class CloudWatchRemoteLogIO(LoggingMixin):  # noqa: D101
             if (end_date := getattr(task_instance, "end_date", None)) is None
             else datetime_to_epoch_utc_ms(end_date + timedelta(seconds=30))
         )
-        events = self.hook.get_log_events(
+        return self.hook.get_log_events(
             log_group=self.log_group,
             log_stream_name=stream_name,
             end_time=end_time,
         )
-        return "\n".join(self._event_to_str(event) for event in events)

-    def _event_to_dict(self, event: dict) -> dict:
+    def _parse_log_event_as_dumped_json(self, event: CloudWatchLogEvent) -> str:
         event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc).isoformat()
-        message = event["message"]
+        event_msg = event["message"]
         try:
-            message = json.loads(message)
+            message = json.loads(event_msg)
             message["timestamp"] = event_dt
-            return message
         except Exception:
-            return {"timestamp": event_dt, "event": message}
+            message = {"timestamp": event_dt, "event": event_msg}

-    def _event_to_str(self, event: dict) -> str:
-        event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
-        # Format a datetime object to a string in Zulu time without milliseconds.
-        formatted_event_dt = event_dt.strftime("%Y-%m-%dT%H:%M:%SZ")
-        message = event["message"]
-        return f"[{formatted_event_dt}] {message}"
+        return json.dumps(message)
```
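
`_parse_log_event_as_dumped_json` normalizes every CloudWatch event into one JSON line: structured (JSON) messages get the event time injected as a `timestamp` field, while plain-text messages are wrapped under an `event` key. A standalone sketch of the same transformation:

```python
import json
from datetime import datetime, timezone


def parse_event(event: dict) -> str:
    # event is a CloudWatch log event: {"timestamp": epoch_ms, "message": str}
    event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc).isoformat()
    try:
        message = json.loads(event["message"])
        message["timestamp"] = event_dt  # structured log: inject the event time
    except Exception:
        message = {"timestamp": event_dt, "event": event["message"]}  # plain text: wrap it
    return json.dumps(message)


print(parse_event({"timestamp": 1700000000000, "message": '{"level": "info"}'}))
print(parse_event({"timestamp": 1700000000000, "message": "plain text line"}))
```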
```diff
@@ -291,4 +303,22 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
     ) -> tuple[LogSourceInfo, LogMessages]:
         stream_name = self._render_filename(task_instance, try_number)
         messages, logs = self.io.read(stream_name, task_instance)
-        return messages, logs
+
+        messages = [
+            f"Reading remote log from Cloudwatch log_group: {self.io.log_group} log_stream: {stream_name}"
+        ]
+        try:
+            events = self.io.get_cloudwatch_logs(stream_name, task_instance)
+            logs = ["\n".join(self._event_to_str(event) for event in events)]
+        except Exception as e:
+            logs = []
+            messages.append(str(e))
+
+        return messages, logs
+
+    def _event_to_str(self, event: CloudWatchLogEvent) -> str:
+        event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
+        # Format a datetime object to a string in Zulu time without milliseconds.
+        formatted_event_dt = event_dt.strftime("%Y-%m-%dT%H:%M:%SZ")
+        message = event["message"]
+        return f"[{formatted_event_dt}] {message}"
```
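
The `read`/`stream` split means log lines are produced lazily: `stream` returns per-source generators that only touch CloudWatch when iterated, and `read` materializes them into newline-terminated strings for callers that still expect the old shape. The relationship, reduced to a self-contained skeleton with stand-in data:

```python
from collections.abc import Generator


def stream() -> tuple[list[str], list[Generator[str, None, None]]]:
    events = ["line 1", "line 2"]  # stand-ins for parsed CloudWatch events
    gen = (e for e in events)      # nothing is fetched until iterated
    return ["reading from cloudwatch"], [gen]


def read() -> tuple[list[str], list[str]]:
    messages, logs = stream()
    # Flatten every lazy stream into eager, newline-terminated strings.
    return messages, [f"{msg}\n" for group in logs for msg in group]


print(read())  # (['reading from cloudwatch'], ['line 1\n', 'line 2\n'])
```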

airflow/providers/amazon/aws/operators/appflow.py:

```diff
@@ -20,11 +20,11 @@ import time
 from datetime import datetime, timedelta
 from typing import TYPE_CHECKING, cast

-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.appflow import AppflowHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.utils import datetime_to_epoch_ms
 from airflow.providers.amazon.aws.utils.mixins import AwsBaseHookMixin, AwsHookParams, aws_template_fields
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.common.compat.standard.operators import ShortCircuitOperator

 if TYPE_CHECKING:
```

airflow/providers/amazon/aws/operators/athena.py:

```diff
@@ -22,13 +22,13 @@ from typing import TYPE_CHECKING, Any
 from urllib.parse import urlparse

 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.athena import AthenaHook
 from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.athena import AthenaTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.providers.common.compat.sdk import AirflowException

 if TYPE_CHECKING:
     from airflow.providers.common.compat.openlineage.facet import BaseFacet, Dataset, DatasetFacet
```

airflow/providers/amazon/aws/operators/base_aws.py:

```diff
@@ -25,8 +25,8 @@ from airflow.providers.amazon.aws.utils.mixins import (
     AwsHookType,
     aws_template_fields,
 )
-from airflow.providers.amazon.version_compat import BaseOperator
-from airflow.utils.types import NOTSET, ArgNotSet
+from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet
+from airflow.providers.common.compat.sdk import BaseOperator


 class AwsBaseOperator(BaseOperator, AwsBaseHookMixin[AwsHookType]):
```

airflow/providers/amazon/aws/operators/batch.py:

```diff
@@ -31,7 +31,6 @@ from datetime import timedelta
 from typing import TYPE_CHECKING, Any

 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.aws.links.batch import (
     BatchJobDefinitionLink,
@@ -47,6 +46,7 @@ from airflow.providers.amazon.aws.triggers.batch import (
 from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
+from airflow.providers.common.compat.sdk import AirflowException

 if TYPE_CHECKING:
     from airflow.utils.context import Context
```

airflow/providers/amazon/aws/operators/bedrock.py:

```diff
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Any
 from botocore.exceptions import ClientError

 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.bedrock import (
     BedrockAgentHook,
     BedrockAgentRuntimeHook,
@@ -41,6 +40,7 @@ from airflow.providers.amazon.aws.triggers.bedrock import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.helpers import prune_dict
 from airflow.utils.timezone import utcnow

@@ -482,6 +482,8 @@ class BedrockCreateKnowledgeBaseOperator(AwsBaseOperator[BedrockAgentHook]):
                 # It may also be that permissions haven't even propagated yet to check for the index
                 or "server returned 401" in error_message
                 or "user does not have permissions" in error_message
+                or "status code: 403" in error_message
+                or "bad authorization" in error_message
             )
             if all(
                 [
```
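
The knowledge-base creation retry check is plain substring matching on the error message; this release widens it so `status code: 403` and `bad authorization` failures are also treated as IAM permissions that have not propagated yet. A sketch of the same classification, with a made-up error message:

```python
def is_transient_auth_error(error_message: str) -> bool:
    # Permissions can lag behind role creation, so 401/403-style failures
    # shortly after setup are treated as retryable rather than fatal.
    return (
        "server returned 401" in error_message
        or "user does not have permissions" in error_message
        or "status code: 403" in error_message
        or "bad authorization" in error_message
    )


print(is_transient_auth_error("OpenSearch returned status code: 403"))  # True
```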

airflow/providers/amazon/aws/operators/cloud_formation.py:

```diff
@@ -32,7 +32,7 @@ if TYPE_CHECKING:

 class CloudFormationCreateStackOperator(AwsBaseOperator[CloudFormationHook]):
     """
-    An operator that creates a AWS CloudFormation stack.
+    An operator that creates an AWS CloudFormation stack.

     .. seealso::
         For more information on how to use this operator, take a look at the guide:
@@ -68,7 +68,7 @@ class CloudFormationCreateStackOperator(AwsBaseOperator[CloudFormationHook]):

 class CloudFormationDeleteStackOperator(AwsBaseOperator[CloudFormationHook]):
     """
-    An operator that deletes a AWS CloudFormation stack.
+    An operator that deletes an AWS CloudFormation stack.

     .. seealso::
         For more information on how to use this operator, take a look at the guide:
```

airflow/providers/amazon/aws/operators/comprehend.py:

```diff
@@ -21,7 +21,6 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, ClassVar

 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
 from airflow.providers.amazon.aws.links.comprehend import (
     ComprehendDocumentClassifierLink,
@@ -34,6 +33,7 @@ from airflow.providers.amazon.aws.triggers.comprehend import (
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.timezone import utcnow

 if TYPE_CHECKING:
```

airflow/providers/amazon/aws/operators/datasync.py:

```diff
@@ -23,11 +23,11 @@ import random
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any

-from airflow.exceptions import AirflowException, AirflowTaskTimeout
 from airflow.providers.amazon.aws.hooks.datasync import DataSyncHook
 from airflow.providers.amazon.aws.links.datasync import DataSyncTaskExecutionLink, DataSyncTaskLink
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.providers.common.compat.sdk import AirflowException, AirflowTaskTimeout

 if TYPE_CHECKING:
     from airflow.utils.context import Context
```

airflow/providers/amazon/aws/operators/dms.py:

```diff
@@ -22,7 +22,6 @@ from datetime import datetime
 from typing import TYPE_CHECKING, Any, ClassVar

 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.dms import (
@@ -33,6 +32,7 @@ from airflow.providers.amazon.aws.triggers.dms import (
     DmsReplicationTerminalStatusTrigger,
 )
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.utils.context import Context

 if TYPE_CHECKING:
```