apache-airflow-providers-amazon 8.16.0rc1__py3-none-any.whl → 8.17.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/auth_manager/avp/entities.py +1 -0
- airflow/providers/amazon/aws/auth_manager/avp/facade.py +34 -19
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +44 -1
- airflow/providers/amazon/aws/auth_manager/cli/__init__.py +16 -0
- airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +178 -0
- airflow/providers/amazon/aws/auth_manager/cli/definition.py +62 -0
- airflow/providers/amazon/aws/auth_manager/cli/schema.json +171 -0
- airflow/providers/amazon/aws/auth_manager/constants.py +1 -0
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +77 -23
- airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py +17 -0
- airflow/providers/amazon/aws/executors/ecs/utils.py +1 -1
- airflow/providers/amazon/aws/executors/utils/__init__.py +16 -0
- airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py +60 -0
- airflow/providers/amazon/aws/hooks/athena_sql.py +168 -0
- airflow/providers/amazon/aws/hooks/base_aws.py +14 -0
- airflow/providers/amazon/aws/hooks/quicksight.py +33 -18
- airflow/providers/amazon/aws/hooks/redshift_data.py +66 -17
- airflow/providers/amazon/aws/hooks/redshift_sql.py +1 -1
- airflow/providers/amazon/aws/hooks/s3.py +18 -4
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +2 -2
- airflow/providers/amazon/aws/operators/batch.py +33 -15
- airflow/providers/amazon/aws/operators/cloud_formation.py +37 -26
- airflow/providers/amazon/aws/operators/datasync.py +19 -18
- airflow/providers/amazon/aws/operators/dms.py +57 -69
- airflow/providers/amazon/aws/operators/ec2.py +19 -5
- airflow/providers/amazon/aws/operators/emr.py +30 -10
- airflow/providers/amazon/aws/operators/eventbridge.py +57 -80
- airflow/providers/amazon/aws/operators/quicksight.py +17 -24
- airflow/providers/amazon/aws/operators/redshift_data.py +68 -19
- airflow/providers/amazon/aws/operators/s3.py +1 -1
- airflow/providers/amazon/aws/operators/sagemaker.py +42 -12
- airflow/providers/amazon/aws/sensors/cloud_formation.py +30 -25
- airflow/providers/amazon/aws/sensors/dms.py +31 -24
- airflow/providers/amazon/aws/sensors/dynamodb.py +15 -15
- airflow/providers/amazon/aws/sensors/quicksight.py +34 -24
- airflow/providers/amazon/aws/sensors/redshift_cluster.py +41 -3
- airflow/providers/amazon/aws/sensors/s3.py +13 -8
- airflow/providers/amazon/aws/triggers/redshift_cluster.py +54 -2
- airflow/providers/amazon/aws/triggers/redshift_data.py +113 -0
- airflow/providers/amazon/aws/triggers/s3.py +9 -4
- airflow/providers/amazon/get_provider_info.py +55 -16
- {apache_airflow_providers_amazon-8.16.0rc1.dist-info → apache_airflow_providers_amazon-8.17.0rc2.dist-info}/METADATA +15 -13
- {apache_airflow_providers_amazon-8.16.0rc1.dist-info → apache_airflow_providers_amazon-8.17.0rc2.dist-info}/RECORD +46 -38
- {apache_airflow_providers_amazon-8.16.0rc1.dist-info → apache_airflow_providers_amazon-8.17.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.16.0rc1.dist-info → apache_airflow_providers_amazon-8.17.0rc2.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/operators/dms.py

@@ -19,14 +19,15 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Sequence
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
+from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
+from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-class DmsCreateTaskOperator(BaseOperator):
+class DmsCreateTaskOperator(AwsBaseOperator[DmsHook]):
     """
     Creates AWS DMS replication task.
 
@@ -42,13 +43,19 @@ class DmsCreateTaskOperator(BaseOperator):
     :param migration_type: Migration type ('full-load'|'cdc'|'full-load-and-cdc'), full-load by default.
     :param create_task_kwargs: Extra arguments for DMS replication task creation.
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-        If this is None or empty then the default boto3 behaviour is used. If
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = (
+    aws_hook_class = DmsHook
+    template_fields: Sequence[str] = aws_template_fields(
         "replication_task_id",
         "source_endpoint_arn",
         "target_endpoint_arn",
@@ -57,7 +64,6 @@ class DmsCreateTaskOperator(BaseOperator):
         "migration_type",
         "create_task_kwargs",
     )
-    template_ext: Sequence[str] = ()
     template_fields_renderers = {
         "table_mappings": "json",
         "create_task_kwargs": "json",
@@ -92,9 +98,7 @@ class DmsCreateTaskOperator(BaseOperator):
 
         :return: replication task arn
         """
-        dms_hook = DmsHook(aws_conn_id=self.aws_conn_id)
-
-        task_arn = dms_hook.create_replication_task(
+        task_arn = self.hook.create_replication_task(
             replication_task_id=self.replication_task_id,
             source_endpoint_arn=self.source_endpoint_arn,
             target_endpoint_arn=self.target_endpoint_arn,
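
Note: the shape these hunks migrate to (a class-level aws_hook_class plus a lazily built self.hook) can be sketched as below. DemoBaseOperator and FakeDmsHook are illustrative stand-ins written for this note, not the provider's actual AwsBaseOperator implementation.

from __future__ import annotations

from functools import cached_property
from typing import Generic, TypeVar

HookT = TypeVar("HookT")


class DemoBaseOperator(Generic[HookT]):
    # Subclasses declare only which hook they need; connection plumbing
    # (aws_conn_id, region_name, ...) lives in one place.
    aws_hook_class: type
    aws_conn_id: str | None = "aws_default"
    region_name: str | None = None

    @cached_property
    def hook(self) -> HookT:
        # Built lazily and cached, replacing per-operator construction
        # such as DmsHook(aws_conn_id=self.aws_conn_id) in execute().
        return self.aws_hook_class(aws_conn_id=self.aws_conn_id, region_name=self.region_name)


class FakeDmsHook:
    def __init__(self, aws_conn_id=None, region_name=None):
        self.aws_conn_id = aws_conn_id


class DemoDmsOperator(DemoBaseOperator[FakeDmsHook]):
    aws_hook_class = FakeDmsHook


assert DemoDmsOperator().hook.aws_conn_id == "aws_default"
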
@@ -108,7 +112,7 @@ class DmsCreateTaskOperator(BaseOperator):
         return task_arn
 
 
-class DmsDeleteTaskOperator(BaseOperator):
+class DmsDeleteTaskOperator(AwsBaseOperator[DmsHook]):
     """
     Deletes AWS DMS replication task.
 
@@ -118,26 +122,23 @@ class DmsDeleteTaskOperator(BaseOperator):
 
     :param replication_task_arn: Replication task ARN
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-        If this is None or empty then the default boto3 behaviour is used. If
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("replication_task_arn",)
-    template_ext: Sequence[str] = ()
-    template_fields_renderers: dict[str, str] = {}
+    aws_hook_class = DmsHook
+    template_fields: Sequence[str] = aws_template_fields("replication_task_arn")
 
-    def __init__(
-        self,
-        *,
-        replication_task_arn: str | None = None,
-        aws_conn_id: str = "aws_default",
-        **kwargs,
-    ):
+    def __init__(self, *, replication_task_arn: str | None = None, **kwargs):
         super().__init__(**kwargs)
         self.replication_task_arn = replication_task_arn
-        self.aws_conn_id = aws_conn_id
 
     def execute(self, context: Context):
         """
@@ -145,12 +146,11 @@ class DmsDeleteTaskOperator(BaseOperator):
 
         :return: replication task arn
         """
-        dms_hook = DmsHook(aws_conn_id=self.aws_conn_id)
-        dms_hook.delete_replication_task(replication_task_arn=self.replication_task_arn)
+        self.hook.delete_replication_task(replication_task_arn=self.replication_task_arn)
         self.log.info("DMS replication task(%s) has been deleted.", self.replication_task_arn)
 
 
-class DmsDescribeTasksOperator(BaseOperator):
+class DmsDescribeTasksOperator(AwsBaseOperator[DmsHook]):
     """
     Describes AWS DMS replication tasks.
 
@@ -160,26 +160,24 @@ class DmsDescribeTasksOperator(BaseOperator):
 
     :param describe_tasks_kwargs: Describe tasks command arguments
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-        If this is None or empty then the default boto3 behaviour is used. If
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("describe_tasks_kwargs",)
-    template_ext: Sequence[str] = ()
+    aws_hook_class = DmsHook
+    template_fields: Sequence[str] = aws_template_fields("describe_tasks_kwargs")
     template_fields_renderers: dict[str, str] = {"describe_tasks_kwargs": "json"}
 
-    def __init__(
-        self,
-        *,
-        describe_tasks_kwargs: dict | None = None,
-        aws_conn_id: str = "aws_default",
-        **kwargs,
-    ):
+    def __init__(self, *, describe_tasks_kwargs: dict | None = None, **kwargs):
         super().__init__(**kwargs)
         self.describe_tasks_kwargs = describe_tasks_kwargs or {}
-        self.aws_conn_id = aws_conn_id
 
     def execute(self, context: Context) -> tuple[str | None, list]:
         """
@@ -187,11 +185,10 @@ class DmsDescribeTasksOperator(BaseOperator):
 
         :return: Marker and list of replication tasks
         """
-        dms_hook = DmsHook(aws_conn_id=self.aws_conn_id)
-        return dms_hook.describe_replication_tasks(**self.describe_tasks_kwargs)
+        return self.hook.describe_replication_tasks(**self.describe_tasks_kwargs)
 
 
-class DmsStartTaskOperator(BaseOperator):
+class DmsStartTaskOperator(AwsBaseOperator[DmsHook]):
     """
     Starts AWS DMS replication task.
 
@@ -204,18 +201,23 @@ class DmsStartTaskOperator(BaseOperator):
         ('start-replication'|'resume-processing'|'reload-target')
     :param start_task_kwargs: Extra start replication task arguments
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-        If this is None or empty then the default boto3 behaviour is used. If
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = (
+    aws_hook_class = DmsHook
+    template_fields: Sequence[str] = aws_template_fields(
         "replication_task_arn",
         "start_replication_task_type",
         "start_task_kwargs",
     )
-    template_ext: Sequence[str] = ()
     template_fields_renderers = {"start_task_kwargs": "json"}
 
     def __init__(
@@ -234,14 +236,8 @@ class DmsStartTaskOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
 
     def execute(self, context: Context):
-        """
-        Start AWS DMS replication task.
-
-        :return: replication task arn
-        """
-        dms_hook = DmsHook(aws_conn_id=self.aws_conn_id)
-
-        dms_hook.start_replication_task(
+        """Start AWS DMS replication task from Airflow."""
+        self.hook.start_replication_task(
             replication_task_arn=self.replication_task_arn,
             start_replication_task_type=self.start_replication_task_type,
             **self.start_task_kwargs,
@@ -249,7 +245,7 @@ class DmsStartTaskOperator(BaseOperator):
         self.log.info("DMS replication task(%s) is starting.", self.replication_task_arn)
 
 
-class DmsStopTaskOperator(BaseOperator):
+class DmsStopTaskOperator(AwsBaseOperator[DmsHook]):
     """
     Stops AWS DMS replication task.
 
@@ -259,33 +255,25 @@ class DmsStopTaskOperator(BaseOperator):
 
     :param replication_task_arn: Replication task ARN
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-        If this is None or empty then the default boto3 behaviour is used. If
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("replication_task_arn",)
-    template_ext: Sequence[str] = ()
-    template_fields_renderers: dict[str, str] = {}
+    aws_hook_class = DmsHook
+    template_fields: Sequence[str] = aws_template_fields("replication_task_arn")
 
-    def __init__(
-        self,
-        *,
-        replication_task_arn: str | None = None,
-        aws_conn_id: str = "aws_default",
-        **kwargs,
-    ):
+    def __init__(self, *, replication_task_arn: str | None = None, **kwargs):
         super().__init__(**kwargs)
         self.replication_task_arn = replication_task_arn
-        self.aws_conn_id = aws_conn_id
 
     def execute(self, context: Context):
-        """
-        Stop AWS DMS replication task.
-
-        :return: replication task arn
-        """
-        dms_hook = DmsHook(aws_conn_id=self.aws_conn_id)
-        dms_hook.stop_replication_task(replication_task_arn=self.replication_task_arn)
+        """Stop AWS DMS replication task from Airflow."""
+        self.hook.stop_replication_task(replication_task_arn=self.replication_task_arn)
         self.log.info("DMS replication task(%s) is stopping.", self.replication_task_arn)
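
Note: with every DMS operator now subclassing AwsBaseOperator[DmsHook], connection arguments such as region_name are accepted uniformly. A hypothetical DAG wiring, assuming provider >= 8.17.0; the ARN and connection id are placeholders:

from __future__ import annotations

from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.dms import (
    DmsStartTaskOperator,
    DmsStopTaskOperator,
)

with DAG(dag_id="dms_demo", start_date=datetime(2024, 1, 1), schedule=None):
    start = DmsStartTaskOperator(
        task_id="start_replication",
        replication_task_arn="arn:aws:dms:...:task:EXAMPLE",  # placeholder
        aws_conn_id="aws_default",
        region_name="us-east-1",  # handled by AwsBaseOperator, not each operator
    )
    stop = DmsStopTaskOperator(
        task_id="stop_replication",
        replication_task_arn="arn:aws:dms:...:task:EXAMPLE",  # placeholder
        region_name="us-east-1",
    )
    start >> stop
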
airflow/providers/amazon/aws/operators/ec2.py

@@ -183,22 +183,36 @@ class EC2CreateInstanceOperator(BaseOperator):
             MaxCount=self.max_count,
             **self.config,
         )["Instances"]
-        instance_ids = []
-        for instance in instances:
-            instance_ids.append(instance["InstanceId"])
-            self.log.info("Created EC2 instance %s", instance["InstanceId"])
+
+        instance_ids = self._on_kill_instance_ids = [instance["InstanceId"] for instance in instances]
+        for instance_id in instance_ids:
+            self.log.info("Created EC2 instance %s", instance_id)
 
             if self.wait_for_completion:
                 ec2_hook.get_waiter("instance_running").wait(
-                    InstanceIds=[instance["InstanceId"]],
+                    InstanceIds=[instance_id],
                     WaiterConfig={
                         "Delay": self.poll_interval,
                         "MaxAttempts": self.max_attempts,
                     },
                 )
 
+        # leave "_on_kill_instance_ids" in place for finishing post-processing
         return instance_ids
 
+    def on_kill(self) -> None:
+        instance_ids = getattr(self, "_on_kill_instance_ids", [])
+
+        if instance_ids:
+            self.log.info("on_kill: Terminating instance/s %s", ", ".join(instance_ids))
+            ec2_hook = EC2Hook(
+                aws_conn_id=self.aws_conn_id,
+                region_name=self.region_name,
+                api_type="client_type",
+            )
+            ec2_hook.conn.terminate_instances(InstanceIds=instance_ids)
+        super().on_kill()
+
 
 class EC2TerminateInstanceOperator(BaseOperator):
     """
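
Note: the on_kill addition follows a general cleanup pattern: record resource ids on the instance as soon as they exist, and guard on_kill with getattr so a kill arriving before execute() ran is a no-op. A standalone sketch; FakeClient stands in for the boto3 EC2 client and is not part of the provider:

class FakeClient:
    def terminate_instances(self, InstanceIds):
        print(f"terminating {InstanceIds}")


class CreateThenCleanup:
    def execute(self):
        # Stash the ids the moment they exist; waiters and post-processing
        # after this point can still fail or be interrupted.
        instance_ids = self._on_kill_instance_ids = ["i-0abc", "i-0def"]
        return instance_ids

    def on_kill(self) -> None:
        # getattr guard: the task may be killed before execute() set the attribute.
        instance_ids = getattr(self, "_on_kill_instance_ids", [])
        if instance_ids:
            FakeClient().terminate_instances(InstanceIds=instance_ids)


task = CreateThenCleanup()
task.execute()
task.on_kill()  # prints: terminating ['i-0abc', 'i-0def']
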
airflow/providers/amazon/aws/operators/emr.py

@@ -716,7 +716,9 @@ class EmrCreateJobFlowOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             # waiter_countdown defaults to never timing out, which is not supported
             # by boto waiters, so we will set it here to "a very long time" for now.
@@ -725,7 +727,9 @@ class EmrCreateJobFlowOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to "
                 "standardize naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             waiter_delay = waiter_check_interval_seconds
         super().__init__(**kwargs)
@@ -1024,7 +1028,9 @@ class EmrServerlessCreateApplicationOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1036,7 +1042,9 @@ class EmrServerlessCreateApplicationOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.aws_conn_id = aws_conn_id
         self.release_label = release_label
@@ -1205,7 +1213,9 @@ class EmrServerlessStartJobOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1217,7 +1227,9 @@ class EmrServerlessStartJobOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.aws_conn_id = aws_conn_id
         self.application_id = application_id
@@ -1408,7 +1420,9 @@ class EmrServerlessStopApplicationOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1420,7 +1434,9 @@ class EmrServerlessStopApplicationOperator(BaseOperator):
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.aws_conn_id = aws_conn_id
         self.application_id = application_id
@@ -1569,7 +1585,9 @@ class EmrServerlessDeleteApplicationOperator(EmrServerlessStopApplicationOperator):
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1581,7 +1599,9 @@ class EmrServerlessDeleteApplicationOperator(EmrServerlessStopApplicationOperator):
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.wait_for_delete_completion = wait_for_completion
         # super stops the app
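
Note: every EMR hunk above fixes the same two omissions in a warnings.warn() call: without an explicit category the warning is emitted as a plain UserWarning, and without stacklevel=2 it is attributed to the provider's own frame rather than the caller's DAG file. A standalone sketch, using the built-in DeprecationWarning in place of airflow.exceptions.AirflowProviderDeprecationWarning so it runs without Airflow installed:

import warnings


def make_job_flow(waiter_countdown=None):
    if waiter_countdown is not None:
        warnings.warn(
            "The parameter waiter_countdown has been deprecated ...",
            DeprecationWarning,  # the provider passes AirflowProviderDeprecationWarning here
            stacklevel=2,  # attribute the warning to the caller, not this frame
        )


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    make_job_flow(waiter_countdown=30)

assert caught[0].category is DeprecationWarning  # not the default UserWarning
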
airflow/providers/amazon/aws/operators/eventbridge.py

@@ -16,19 +16,19 @@
 # under the License.
 from __future__ import annotations
 
-from functools import cached_property
 from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
+from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
+from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.utils.helpers import prune_dict
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-class EventBridgePutEventsOperator(BaseOperator):
+class EventBridgePutEventsOperator(AwsBaseOperator[EventBridgeHook]):
     """
     Put Events onto Amazon EventBridge.
 
@@ -38,32 +38,25 @@ class EventBridgePutEventsOperator(BaseOperator):
 
     :param entries: the list of events to be put onto EventBridge, each event is a dict (required)
     :param endpoint_id: the URL subdomain of the endpoint
-    :param aws_conn_id: the AWS connection to use
-    :param region_name: the region where events are to be sent
-
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("entries", "endpoint_id", "aws_conn_id", "region_name")
+    aws_hook_class = EventBridgeHook
+    template_fields: Sequence[str] = aws_template_fields("entries", "endpoint_id")
 
-    def __init__(
-        self,
-        *,
-        entries: list[dict],
-        endpoint_id: str | None = None,
-        aws_conn_id: str = "aws_default",
-        region_name: str | None = None,
-        **kwargs,
-    ):
+    def __init__(self, *, entries: list[dict], endpoint_id: str | None = None, **kwargs):
         super().__init__(**kwargs)
         self.entries = entries
         self.endpoint_id = endpoint_id
-        self.aws_conn_id = aws_conn_id
-        self.region_name = region_name
-
-    @cached_property
-    def hook(self) -> EventBridgeHook:
-        """Create and return an EventBridgeHook."""
-        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
     def execute(self, context: Context):
         response = self.hook.conn.put_events(
@@ -90,7 +83,7 @@ class EventBridgePutEventsOperator(BaseOperator):
         return [e["EventId"] for e in response["Entries"]]
 
 
-class EventBridgePutRuleOperator(BaseOperator):
+class EventBridgePutRuleOperator(AwsBaseOperator[EventBridgeHook]):
     """
     Create or update a specified EventBridge rule.
 
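
Note: a hypothetical use of the refactored EventBridgePutEventsOperator; the entry values are placeholders, and this assumes provider >= 8.17.0 so region_name is handled by the AwsBaseOperator base class. Source, DetailType and Detail are the standard EventBridge PutEvents entry fields:

import json

from airflow.providers.amazon.aws.operators.eventbridge import EventBridgePutEventsOperator

put_events = EventBridgePutEventsOperator(
    task_id="put_events",
    entries=[
        {
            "Source": "my.app",  # placeholder event source
            "DetailType": "demo",
            "Detail": json.dumps({"status": "ok"}),
        }
    ],
    region_name="us-east-1",  # accepted via the AwsBaseOperator base class
)
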
@@ -106,12 +99,20 @@ class EventBridgePutRuleOperator(BaseOperator):
     :param schedule_expression: the scheduling expression (for example, a cron or rate expression)
     :param state: indicates whether rule is set to be "ENABLED" or "DISABLED"
     :param tags: list of key-value pairs to associate with the rule
-    :param region_name: the region where rule is to be created or updated
-
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = (
-        "aws_conn_id",
+    aws_hook_class = EventBridgeHook
+    template_fields: Sequence[str] = aws_template_fields(
         "name",
         "description",
         "event_bus_name",
@@ -120,7 +121,6 @@ class EventBridgePutRuleOperator(BaseOperator):
         "schedule_expression",
         "state",
         "tags",
-        "region_name",
     )
 
     def __init__(
@@ -134,8 +134,6 @@ class EventBridgePutRuleOperator(BaseOperator):
         schedule_expression: str | None = None,
         state: str | None = None,
         tags: list | None = None,
-        region_name: str | None = None,
-        aws_conn_id: str = "aws_default",
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -144,16 +142,9 @@ class EventBridgePutRuleOperator(BaseOperator):
         self.event_bus_name = event_bus_name
         self.event_pattern = event_pattern
         self.role_arn = role_arn
-        self.region_name = region_name
         self.schedule_expression = schedule_expression
         self.state = state
         self.tags = tags
-        self.aws_conn_id = aws_conn_id
-
-    @cached_property
-    def hook(self) -> EventBridgeHook:
-        """Create and return an EventBridgeHook."""
-        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
     def execute(self, context: Context):
         self.log.info('Sending rule "%s" to EventBridge.', self.name)
@@ -170,7 +161,7 @@ class EventBridgePutRuleOperator(BaseOperator):
         )
 
 
-class EventBridgeEnableRuleOperator(BaseOperator):
+class EventBridgeEnableRuleOperator(AwsBaseOperator[EventBridgeHook]):
     """
     Enable an EventBridge Rule.
 
@@ -180,32 +171,25 @@ class EventBridgeEnableRuleOperator(BaseOperator):
 
     :param name: the name of the rule to enable
     :param event_bus_name: the name or ARN of the event bus associated with the rule (default if omitted)
-    :param aws_conn_id: the AWS connection to use
-    :param region_name: the region of the rule to be enabled
-
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("name", "event_bus_name", "aws_conn_id", "region_name")
+    aws_hook_class = EventBridgeHook
+    template_fields: Sequence[str] = aws_template_fields("name", "event_bus_name")
 
-    def __init__(
-        self,
-        *,
-        name: str,
-        event_bus_name: str | None = None,
-        region_name: str | None = None,
-        aws_conn_id: str = "aws_default",
-        **kwargs,
-    ):
+    def __init__(self, *, name: str, event_bus_name: str | None = None, **kwargs):
         super().__init__(**kwargs)
         self.name = name
         self.event_bus_name = event_bus_name
-        self.region_name = region_name
-        self.aws_conn_id = aws_conn_id
-
-    @cached_property
-    def hook(self) -> EventBridgeHook:
-        """Create and return an EventBridgeHook."""
-        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
     def execute(self, context: Context):
         self.hook.conn.enable_rule(
@@ -220,7 +204,7 @@ class EventBridgeEnableRuleOperator(BaseOperator):
         self.log.info('Enabled rule "%s"', self.name)
 
 
-class EventBridgeDisableRuleOperator(BaseOperator):
+class EventBridgeDisableRuleOperator(AwsBaseOperator[EventBridgeHook]):
     """
     Disable an EventBridge Rule.
 
@@ -230,32 +214,25 @@ class EventBridgeDisableRuleOperator(BaseOperator):
 
     :param name: the name of the rule to disable
     :param event_bus_name: the name or ARN of the event bus associated with the rule (default if omitted)
-    :param aws_conn_id: the AWS connection to use
-    :param region_name: the region of the rule to be disabled
-
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("name", "event_bus_name", "aws_conn_id", "region_name")
+    aws_hook_class = EventBridgeHook
+    template_fields: Sequence[str] = aws_template_fields("name", "event_bus_name")
 
-    def __init__(
-        self,
-        *,
-        name: str,
-        event_bus_name: str | None = None,
-        region_name: str | None = None,
-        aws_conn_id: str = "aws_default",
-        **kwargs,
-    ):
+    def __init__(self, *, name: str, event_bus_name: str | None = None, **kwargs):
         super().__init__(**kwargs)
         self.name = name
         self.event_bus_name = event_bus_name
-        self.region_name = region_name
-        self.aws_conn_id = aws_conn_id
-
-    @cached_property
-    def hook(self) -> EventBridgeHook:
-        """Create and return an EventBridgeHook."""
-        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
     def execute(self, context: Context):
         self.hook.conn.disable_rule(
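
Note: after this refactor the enable/disable operators are thin wrappers over self.hook.conn. A hypothetical pairing, assuming provider >= 8.17.0; the rule name is a placeholder:

from airflow.providers.amazon.aws.operators.eventbridge import (
    EventBridgeDisableRuleOperator,
    EventBridgeEnableRuleOperator,
)

# Both operators now take only rule-specific arguments; aws_conn_id,
# region_name, verify and botocore_config come from AwsBaseOperator.
enable_rule = EventBridgeEnableRuleOperator(task_id="enable_rule", name="my_rule")
disable_rule = EventBridgeDisableRuleOperator(task_id="disable_rule", name="my_rule")
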