apache-airflow-providers-amazon 8.6.0rc1__py3-none-any.whl → 8.7.0__py3-none-any.whl
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/hooks/appflow.py +2 -5
- airflow/providers/amazon/aws/hooks/athena.py +4 -3
- airflow/providers/amazon/aws/hooks/base_aws.py +28 -41
- airflow/providers/amazon/aws/hooks/batch_client.py +8 -6
- airflow/providers/amazon/aws/hooks/batch_waiters.py +4 -2
- airflow/providers/amazon/aws/hooks/chime.py +13 -8
- airflow/providers/amazon/aws/hooks/cloud_formation.py +5 -1
- airflow/providers/amazon/aws/hooks/datasync.py +9 -16
- airflow/providers/amazon/aws/hooks/ecr.py +4 -1
- airflow/providers/amazon/aws/hooks/ecs.py +4 -1
- airflow/providers/amazon/aws/hooks/redshift_cluster.py +8 -12
- airflow/providers/amazon/aws/hooks/redshift_data.py +1 -1
- airflow/providers/amazon/aws/hooks/s3.py +4 -6
- airflow/providers/amazon/aws/hooks/sagemaker.py +7 -8
- airflow/providers/amazon/aws/hooks/sns.py +0 -1
- airflow/providers/amazon/aws/links/emr.py +4 -3
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +4 -1
- airflow/providers/amazon/aws/log/s3_task_handler.py +1 -1
- airflow/providers/amazon/aws/notifications/chime.py +4 -1
- airflow/providers/amazon/aws/notifications/sns.py +94 -0
- airflow/providers/amazon/aws/notifications/sqs.py +100 -0
- airflow/providers/amazon/aws/operators/ecs.py +5 -5
- airflow/providers/amazon/aws/operators/glue.py +1 -1
- airflow/providers/amazon/aws/operators/rds.py +2 -2
- airflow/providers/amazon/aws/sensors/batch.py +7 -2
- airflow/providers/amazon/aws/sensors/dynamodb.py +1 -1
- airflow/providers/amazon/aws/sensors/ecs.py +2 -2
- airflow/providers/amazon/aws/sensors/s3.py +2 -2
- airflow/providers/amazon/aws/sensors/sqs.py +7 -6
- airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +2 -2
- airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/mongo_to_s3.py +3 -2
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py +1 -3
- airflow/providers/amazon/aws/transfers/sql_to_s3.py +4 -2
- airflow/providers/amazon/aws/triggers/athena.py +5 -1
- airflow/providers/amazon/aws/triggers/base.py +4 -2
- airflow/providers/amazon/aws/triggers/batch.py +10 -11
- airflow/providers/amazon/aws/triggers/ecs.py +9 -6
- airflow/providers/amazon/aws/triggers/eks.py +4 -2
- airflow/providers/amazon/aws/triggers/emr.py +6 -4
- airflow/providers/amazon/aws/triggers/glue_crawler.py +4 -1
- airflow/providers/amazon/aws/triggers/lambda_function.py +5 -1
- airflow/providers/amazon/aws/triggers/rds.py +4 -2
- airflow/providers/amazon/aws/triggers/redshift_cluster.py +4 -1
- airflow/providers/amazon/aws/triggers/s3.py +4 -2
- airflow/providers/amazon/aws/triggers/sqs.py +6 -2
- airflow/providers/amazon/aws/triggers/step_function.py +5 -1
- airflow/providers/amazon/aws/utils/__init__.py +4 -2
- airflow/providers/amazon/aws/utils/redshift.py +3 -1
- airflow/providers/amazon/aws/utils/sqs.py +7 -12
- airflow/providers/amazon/aws/utils/suppress.py +74 -0
- airflow/providers/amazon/aws/utils/task_log_fetcher.py +4 -2
- airflow/providers/amazon/aws/utils/waiter_with_logging.py +4 -2
- airflow/providers/amazon/aws/waiters/base_waiter.py +5 -1
- airflow/providers/amazon/get_provider_info.py +20 -5
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/METADATA +8 -16
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/RECORD +63 -60
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/WHEEL +1 -1
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/LICENSE +0 -0
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/NOTICE +0 -0
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_amazon-8.6.0rc1.dist-info → apache_airflow_providers_amazon-8.7.0.dist-info}/top_level.txt +0 -0
airflow/providers/amazon/aws/notifications/sns.py
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import Sequence
+
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.amazon.aws.hooks.sns import SnsHook
+
+try:
+    from airflow.notifications.basenotifier import BaseNotifier
+except ImportError:
+    raise AirflowOptionalProviderFeatureException(
+        "Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
+    )
+
+
+class SnsNotifier(BaseNotifier):
+    """
+    Amazon SNS (Simple Notification Service) Notifier.
+
+    .. seealso::
+        For more information on how to use this notifier, take a look at the guide:
+        :ref:`howto/notifier:SnsNotifier`
+
+    :param aws_conn_id: The :ref:`Amazon Web Services Connection id <howto/connection:aws>`
+        used for AWS credentials. If this is None or empty then the default boto3 behaviour is used.
+    :param target_arn: Either a TopicArn or an EndpointArn.
+    :param message: The message you want to send.
+    :param subject: The message subject you want to send.
+    :param message_attributes: The message attributes you want to send as a flat dict (data type will be
+        determined automatically).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    """
+
+    template_fields: Sequence[str] = (
+        "target_arn",
+        "message",
+        "subject",
+        "message_attributes",
+        "aws_conn_id",
+        "region_name",
+    )
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str | None = SnsHook.default_conn_name,
+        target_arn: str,
+        message: str,
+        subject: str | None = None,
+        message_attributes: dict | None = None,
+        region_name: str | None = None,
+    ):
+        super().__init__()
+        self.aws_conn_id = aws_conn_id
+        self.region_name = region_name
+        self.target_arn = target_arn
+        self.message = message
+        self.subject = subject
+        self.message_attributes = message_attributes
+
+    @cached_property
+    def hook(self) -> SnsHook:
+        """Amazon SNS Hook (cached)."""
+        return SnsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+    def notify(self, context):
+        """Publish the notification message to Amazon SNS."""
+        self.hook.publish_to_target(
+            target_arn=self.target_arn,
+            message=self.message,
+            subject=self.subject,
+            message_attributes=self.message_attributes,
+        )
+
+
+send_sns_notification = SnsNotifier
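For context, here is a minimal usage sketch of the new notifier as a DAG-level failure callback. The connection id, topic ARN, and DAG below are illustrative placeholders, not values taken from this diff:

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.providers.amazon.aws.notifications.sns import send_sns_notification

with DAG(
    dag_id="sns_notifier_demo",  # placeholder DAG id
    start_date=datetime(2023, 1, 1),
    schedule=None,
    # The notifier instance is callable, so it can serve as a callback;
    # template fields such as `message` are rendered before publishing.
    on_failure_callback=send_sns_notification(
        aws_conn_id="aws_default",
        target_arn="arn:aws:sns:us-east-1:123456789012:my-topic",  # placeholder ARN
        message="A task in dag {{ dag.dag_id }} failed",
        subject="Airflow alert",
    ),
):
    EmptyOperator(task_id="noop")
```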
airflow/providers/amazon/aws/notifications/sqs.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import Sequence
+
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.amazon.aws.hooks.sqs import SqsHook
+
+try:
+    from airflow.notifications.basenotifier import BaseNotifier
+except ImportError:
+    raise AirflowOptionalProviderFeatureException(
+        "Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
+    )
+
+
+class SqsNotifier(BaseNotifier):
+    """
+    Amazon SQS (Simple Queue Service) Notifier.
+
+    .. seealso::
+        For more information on how to use this notifier, take a look at the guide:
+        :ref:`howto/notifier:SqsNotifier`
+
+    :param aws_conn_id: The :ref:`Amazon Web Services Connection id <howto/connection:aws>`
+        used for AWS credentials. If this is None or empty then the default boto3 behaviour is used.
+    :param queue_url: The URL of the Amazon SQS queue to which a message is sent.
+    :param message_body: The message to send.
+    :param message_attributes: additional attributes for the message.
+        For details of the attributes parameter see :py:meth:`botocore.client.SQS.send_message`.
+    :param message_group_id: This parameter applies only to FIFO (first-in-first-out) queues.
+        For details of the attributes parameter see :py:meth:`botocore.client.SQS.send_message`.
+    :param delay_seconds: The length of time, in seconds, for which to delay a message.
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    """
+
+    template_fields: Sequence[str] = (
+        "queue_url",
+        "message_body",
+        "message_attributes",
+        "message_group_id",
+        "delay_seconds",
+        "aws_conn_id",
+        "region_name",
+    )
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str | None = SqsHook.default_conn_name,
+        queue_url: str,
+        message_body: str,
+        message_attributes: dict | None = None,
+        message_group_id: str | None = None,
+        delay_seconds: int = 0,
+        region_name: str | None = None,
+    ):
+        super().__init__()
+        self.aws_conn_id = aws_conn_id
+        self.region_name = region_name
+        self.queue_url = queue_url
+        self.message_body = message_body
+        self.message_attributes = message_attributes or {}
+        self.message_group_id = message_group_id
+        self.delay_seconds = delay_seconds
+
+    @cached_property
+    def hook(self) -> SqsHook:
+        """Amazon SQS Hook (cached)."""
+        return SqsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+    def notify(self, context):
+        """Publish the notification message to Amazon SQS queue."""
+        self.hook.send_message(
+            queue_url=self.queue_url,
+            message_body=self.message_body,
+            delay_seconds=self.delay_seconds,
+            message_attributes=self.message_attributes,
+            message_group_id=self.message_group_id,
+        )
+
+
+send_sqs_notification = SqsNotifier
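And the SQS counterpart, again a hedged sketch with a placeholder queue URL:

```python
from airflow.operators.bash import BashOperator
from airflow.providers.amazon.aws.notifications.sqs import send_sqs_notification

task = BashOperator(
    task_id="hello",
    bash_command="echo hello",
    # Send the rendered message body to the (placeholder) queue on success.
    on_success_callback=send_sqs_notification(
        queue_url="https://sqs.us-east-1.amazonaws.com/123456789012/my-queue",
        message_body="Task {{ ti.task_id }} succeeded",
    ),
)
```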
airflow/providers/amazon/aws/operators/ecs.py
@@ -18,14 +18,11 @@
 from __future__ import annotations
 
 import re
-import sys
 import warnings
 from datetime import timedelta
 from functools import cached_property
 from typing import TYPE_CHECKING, Sequence
 
-import boto3
-
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.models import BaseOperator
@@ -43,6 +40,8 @@ from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
 from airflow.utils.helpers import prune_dict
 
 if TYPE_CHECKING:
+    import boto3
+
     from airflow.models import TaskInstance
     from airflow.utils.context import Context
 
@@ -476,7 +475,9 @@ class EcsRunTaskOperator(EcsBaseOperator):
         number_logs_exception: int = 10,
         wait_for_completion: bool = True,
         waiter_delay: int = 6,
-        waiter_max_attempts: int =
+        waiter_max_attempts: int = 1000000 * 365 * 24 * 60 * 10,
+        # Set the default waiter duration to 1M years (attempts*delay)
+        # Airflow execution_timeout handles task timeout
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs,
     ):
@@ -665,7 +666,6 @@ class EcsRunTaskOperator(EcsBaseOperator):
             return
 
         waiter = self.client.get_waiter("tasks_stopped")
-        waiter.config.max_attempts = sys.maxsize  # timeout is managed by airflow
        waiter.wait(
             cluster=self.cluster,
             tasks=[self.arn],
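The arithmetic behind the new default and its comment checks out: with the default waiter_delay of 6 seconds, attempts times delay works out to one million years, so in practice the waiter never gives up on its own and Airflow's execution_timeout governs instead:

```python
attempts = 1000000 * 365 * 24 * 60 * 10  # new waiter_max_attempts default
delay_s = 6                              # default waiter_delay
seconds_per_year = 365 * 24 * 60 * 60
print(attempts * delay_s / seconds_per_year)  # 1000000.0 years
```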
airflow/providers/amazon/aws/operators/rds.py
@@ -23,8 +23,6 @@ from datetime import timedelta
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
-from mypy_boto3_rds.type_defs import TagTypeDef
-
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.models import BaseOperator
@@ -39,6 +37,8 @@ from airflow.providers.amazon.aws.utils.tags import format_tags
 from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
 
 if TYPE_CHECKING:
+    from mypy_boto3_rds.type_defs import TagTypeDef
+
     from airflow.utils.context import Context
 
 
airflow/providers/amazon/aws/sensors/batch.py
@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any, Sequence
 from deprecated import deprecated
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
 from airflow.sensors.base import BaseSensorOperator
@@ -115,7 +115,12 @@ class BatchSensor(BaseSensorOperator):
         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
         """
         if event["status"] != "success":
-            raise AirflowException(f"Error while running job: {event}")
+            message = f"Error while running job: {event}"
+            # TODO: remove this if-else block when min_airflow_version is set to higher than the version that
+            # changed in https://github.com/apache/airflow/pull/33424 is released
+            if self.soft_fail:
+                raise AirflowSkipException(message)
+            raise AirflowException(message)
         job_id = event["job_id"]
         self.log.info("Batch Job %s complete", job_id)
 
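The effect of the BatchSensor change, sketched from the user side; the task ids are placeholders, and deferrable=True routes completion through the execute_complete() path patched above:

```python
from airflow.providers.amazon.aws.sensors.batch import BatchSensor

wait_for_job = BatchSensor(
    task_id="wait_for_batch_job",  # placeholder task id
    job_id="{{ ti.xcom_pull(task_ids='submit_job') }}",
    deferrable=True,  # completion is handled by execute_complete() above
    soft_fail=True,   # a failed job now skips the task instead of failing it
)
```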
airflow/providers/amazon/aws/sensors/dynamodb.py
@@ -84,7 +84,7 @@ class DynamoDBValueSensor(BaseSensorOperator):
         key = {self.partition_key_name: self.partition_key_value}
         msg = (
             f"Checking table {self.table_name} for "
-
+            f"item Partition Key: {self.partition_key_name}={self.partition_key_value}"
         )
 
         if self.sort_key_name and self.sort_key_value:
airflow/providers/amazon/aws/sensors/ecs.py
@@ -19,8 +19,6 @@ from __future__ import annotations
 from functools import cached_property
 from typing import TYPE_CHECKING, Sequence
 
-import boto3
-
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.ecs import (
     EcsClusterStates,
@@ -31,6 +29,8 @@ from airflow.providers.amazon.aws.hooks.ecs import (
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
+    import boto3
+
     from airflow.utils.context import Context
 
 DEFAULT_CONN_ID: str = "aws_default"
airflow/providers/amazon/aws/sensors/s3.py
@@ -113,14 +113,14 @@ class S3KeySensor(BaseSensorOperator):
             }]
         """
         if self.wildcard_match:
-            prefix = re.split(r"[\[
+            prefix = re.split(r"[\[*?]", key, 1)[0]
             keys = self.hook.get_file_metadata(prefix, bucket_name)
             key_matches = [k for k in keys if fnmatch.fnmatch(k["Key"], key)]
             if not key_matches:
                 return False
 
             # Reduce the set of metadata to size only
-            files =
+            files = [{"Size": f["Size"]} for f in key_matches]
         else:
             obj = self.hook.head_object(key, bucket_name)
             if obj is None:
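What the reconstructed S3KeySensor lines compute: everything before the first glob metacharacter becomes the S3 list prefix, and fnmatch then filters the returned keys against the full pattern:

```python
import fnmatch
import re

key = "logs/2023/*/run-?.json"
prefix = re.split(r"[\[*?]", key, 1)[0]  # same split as in the hunk above
print(prefix)  # logs/2023/
print(fnmatch.fnmatch("logs/2023/10/run-1.json", key))  # True
```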
airflow/providers/amazon/aws/sensors/sqs.py
@@ -19,19 +19,20 @@
 from __future__ import annotations
 
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Collection,
+from typing import TYPE_CHECKING, Any, Collection, Sequence
 
 from deprecated import deprecated
+from typing_extensions import Literal
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
 from airflow.providers.amazon.aws.utils.sqs import process_response
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
     from airflow.utils.context import Context
     from datetime import timedelta
@@ -203,12 +204,12 @@ class SqsSensor(BaseSensorOperator):
 
         if "Successful" not in response:
             raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
-        if
+        if message_batch:
+            context["ti"].xcom_push(key="messages", value=message_batch)
+            return True
+        else:
             return False
 
-        context["ti"].xcom_push(key="messages", value=message_batch)
-        return True
-
     @deprecated(reason="use `hook` property instead.")
     def get_hook(self) -> SqsHook:
         """Create and return an SqsHook."""
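Downstream of the rewritten poke(), the received batch is available over XCom under the "messages" key; a hypothetical consumer task (sensor task id is a placeholder) might look like:

```python
from airflow.operators.python import PythonOperator

def handle_messages(ti):
    # "wait_for_sqs" is a placeholder id for the SqsSensor task.
    for message in ti.xcom_pull(task_ids="wait_for_sqs", key="messages") or []:
        print(message["Body"])

process = PythonOperator(task_id="process_messages", python_callable=handle_messages)
```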
airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -19,11 +19,11 @@
 from __future__ import annotations
 
 import json
+import os
 from copy import copy
 from datetime import datetime
 from decimal import Decimal
 from functools import cached_property
-from os.path import getsize
 from tempfile import NamedTemporaryFile
 from typing import IO, TYPE_CHECKING, Any, Callable, Sequence
 from uuid import uuid4
@@ -197,7 +197,7 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
            scan_kwargs["ExclusiveStartKey"] = last_evaluated_key
 
            # Upload the file to S3 if reach file size limit
-            if getsize(temp_file.name) >= self.file_size:
+            if os.path.getsize(temp_file.name) >= self.file_size:
                _upload_file_to_s3(temp_file, self.s3_bucket_name, self.s3_key_prefix, self.dest_aws_conn_id)
                temp_file.close()
airflow/providers/amazon/aws/transfers/ftp_to_s3.py
@@ -127,7 +127,7 @@ class FTPToS3Operator(BaseOperator):
             files = list_dir
         else:
             ftp_filename: str = self.ftp_filenames
-            files =
+            files = [f for f in list_dir if ftp_filename in f]
 
         for file in files:
             self.log.info("Moving file %s", file)
airflow/providers/amazon/aws/transfers/mongo_to_s3.py
@@ -21,14 +21,15 @@ import json
 from typing import TYPE_CHECKING, Any, Iterable, Sequence, cast
 
 from bson import json_util
-from pymongo.command_cursor import CommandCursor
-from pymongo.cursor import Cursor
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.mongo.hooks.mongo import MongoHook
 
 if TYPE_CHECKING:
+    from pymongo.command_cursor import CommandCursor
+    from pymongo.cursor import Cursor
+
     from airflow.utils.context import Context
 
 
airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -131,9 +131,7 @@ class RedshiftToS3Operator(BaseOperator):
         )
 
         if self.include_header and "HEADER" not in [uo.upper().strip() for uo in self.unload_options]:
-            self.unload_options =
-                "HEADER",
-            ]
+            self.unload_options = [*self.unload_options, "HEADER"]
 
         if self.redshift_data_api_kwargs:
             for arg in ["sql", "parameters"]:
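The replacement one-liner builds a fresh list via iterable unpacking, which also works when unload_options was passed in as a tuple:

```python
unload_options = ("PARALLEL OFF",)  # illustrative options
unload_options = [*unload_options, "HEADER"]
print(unload_options)  # ['PARALLEL OFF', 'HEADER']
```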
airflow/providers/amazon/aws/transfers/sql_to_s3.py
@@ -20,17 +20,19 @@ from __future__ import annotations
 import enum
 from collections import namedtuple
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Iterable,
+from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
+
+from typing_extensions import Literal
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.common.sql.hooks.sql import DbApiHook
 
 if TYPE_CHECKING:
     import pandas as pd
 
+    from airflow.providers.common.sql.hooks.sql import DbApiHook
     from airflow.utils.context import Context
 
 
airflow/providers/amazon/aws/triggers/athena.py
@@ -16,10 +16,14 @@
 # under the License.
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from airflow.providers.amazon.aws.hooks.athena import AthenaHook
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 class AthenaTrigger(AwsBaseWaiterTrigger):
     """
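This hunk and most of the trigger hunks below apply the same pattern: imports needed only for type annotations move under typing.TYPE_CHECKING, so type checkers still see them but they are never imported at runtime. A self-contained sketch of the idiom:

```python
from __future__ import annotations  # annotations stay as unevaluated strings

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen by mypy/pyright; skipped at runtime, avoiding the import cost.
    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook

def describe(hook: AwsGenericHook) -> str:
    return f"hook using connection {hook.aws_conn_id}"
```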
airflow/providers/amazon/aws/triggers/base.py
@@ -18,12 +18,14 @@
 from __future__ import annotations
 
 from abc import abstractmethod
-from typing import Any, AsyncIterator
+from typing import TYPE_CHECKING, Any, AsyncIterator
 
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 class AwsBaseWaiterTrigger(BaseTrigger):
     """
airflow/providers/amazon/aws/triggers/batch.py
@@ -19,16 +19,18 @@ from __future__ import annotations
 import asyncio
 import itertools
 from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from botocore.exceptions import WaiterError
 from deprecated import deprecated
 
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 @deprecated(reason="use BatchJobTrigger instead")
 class BatchOperatorTrigger(BaseTrigger):
@@ -78,9 +80,7 @@ class BatchOperatorTrigger(BaseTrigger):
 
         async with self.hook.async_conn as client:
             waiter = self.hook.get_waiter("batch_job_complete", deferrable=True, client=client)
-            attempt
-            while attempt < self.max_retries:
-                attempt = attempt + 1
+            for attempt in range(1, 1 + self.max_retries):
                 try:
                     await waiter.wait(
                         jobs=[self.job_id],
@@ -89,7 +89,6 @@ class BatchOperatorTrigger(BaseTrigger):
                             "MaxAttempts": 1,
                         },
                     )
-                    break
                 except WaiterError as error:
                     if "terminal failure" in str(error):
                         yield TriggerEvent(
@@ -103,11 +102,11 @@ class BatchOperatorTrigger(BaseTrigger):
                         self.max_retries,
                     )
                     await asyncio.sleep(int(self.poll_interval))
-
-
-
-
-
+                else:
+                    yield TriggerEvent({"status": "success", "job_id": self.job_id})
+                    break
+            else:
+                yield TriggerEvent({"status": "failure", "message": "Job Failed - max attempts reached."})
 
 
 @deprecated(reason="use BatchJobTrigger instead")
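The rewritten retry loop leans on two else clauses: the else arm of the try runs when wait() succeeds without raising, and the else arm of the for runs only when the loop exhausts without a break. A stand-alone sketch of the same control flow with a stubbed waiter:

```python
def wait_once(attempt: int) -> bool:
    return attempt >= 3  # stub: pretend the job completes on the third poll

max_retries = 5
for attempt in range(1, 1 + max_retries):
    if wait_once(attempt):
        print({"status": "success", "attempt": attempt})
        break
else:  # no break: every attempt was used up
    print({"status": "failure", "message": "Job Failed - max attempts reached."})
```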
airflow/providers/amazon/aws/triggers/ecs.py
@@ -18,18 +18,20 @@
 from __future__ import annotations
 
 import asyncio
-from typing import Any, AsyncIterator
+from typing import TYPE_CHECKING, Any, AsyncIterator
 
 from botocore.exceptions import ClientError, WaiterError
 
 from airflow import AirflowException
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.hooks.ecs import EcsHook
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 class ClusterActiveTrigger(AwsBaseWaiterTrigger):
     """
@@ -159,10 +161,11 @@ class TaskDoneTrigger(BaseTrigger):
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
-
-
-
-
+        async with EcsHook(
+            aws_conn_id=self.aws_conn_id, region_name=self.region
+        ).async_conn as ecs_client, AwsLogsHook(
+            aws_conn_id=self.aws_conn_id, region_name=self.region
+        ).async_conn as logs_client:
             waiter = ecs_client.get_waiter("tasks_stopped")
             logs_token = None
             while self.waiter_max_attempts:
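The reconstructed `async with A() as a, B() as b:` statement opens both clients together and closes them in reverse order on exit. A minimal stand-alone illustration:

```python
import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def client(name: str):
    print(f"open {name}")
    try:
        yield name
    finally:
        print(f"close {name}")

async def main():
    async with client("ecs") as ecs_client, client("logs") as logs_client:
        print(f"polling with {ecs_client}, reading logs with {logs_client}")

asyncio.run(main())  # open ecs, open logs, polling..., close logs, close ecs
```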
airflow/providers/amazon/aws/triggers/eks.py
@@ -17,15 +17,17 @@
 from __future__ import annotations
 
 import warnings
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.hooks.eks import EksHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
 from airflow.triggers.base import TriggerEvent
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 class EksCreateClusterTrigger(AwsBaseWaiterTrigger):
     """
airflow/providers/amazon/aws/triggers/emr.py
@@ -18,16 +18,18 @@ from __future__ import annotations
 
 import asyncio
 import warnings
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from botocore.exceptions import WaiterError
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 class EmrAddStepsTrigger(BaseTrigger):
     """
@@ -240,7 +242,7 @@ class EmrContainerTrigger(AwsBaseWaiterTrigger):
         )
 
     def hook(self) -> AwsGenericHook:
-        return EmrContainerHook(self.aws_conn_id)
+        return EmrContainerHook(aws_conn_id=self.aws_conn_id)
 
 
 class EmrStepSensorTrigger(AwsBaseWaiterTrigger):
@@ -280,7 +282,7 @@ class EmrStepSensorTrigger(AwsBaseWaiterTrigger):
         )
 
     def hook(self) -> AwsGenericHook:
-        return EmrHook(self.aws_conn_id)
+        return EmrHook(aws_conn_id=self.aws_conn_id)
 
 
 class EmrServerlessCreateApplicationTrigger(AwsBaseWaiterTrigger):
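Why the keyword form matters in the two hook() fixes above: the first positional parameter of these hooks is not aws_conn_id (EmrHook, for instance, accepts an EMR-specific connection id first), so the positional call bound the AWS connection id to the wrong parameter. A hedged sketch of that failure mode with stand-in classes:

```python
class StubBaseHook:
    def __init__(self, *, aws_conn_id: str = "aws_default"):
        self.aws_conn_id = aws_conn_id

class StubEmrHook(StubBaseHook):
    # First positional parameter is NOT aws_conn_id, mirroring the real hook.
    def __init__(self, emr_conn_id: str = "emr_default", **kwargs):
        super().__init__(**kwargs)
        self.emr_conn_id = emr_conn_id

print(StubEmrHook("my_aws_conn").emr_conn_id)              # my_aws_conn (wrong slot)
print(StubEmrHook(aws_conn_id="my_aws_conn").aws_conn_id)  # my_aws_conn (correct)
```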
airflow/providers/amazon/aws/triggers/glue_crawler.py
@@ -17,12 +17,15 @@
 from __future__ import annotations
 
 import warnings
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
 from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+
 
 class GlueCrawlerCompleteTrigger(AwsBaseWaiterTrigger):
     """
|