apache-airflow-providers-amazon 8.3.1__py3-none-any.whl → 8.4.0__py3-none-any.whl

This diff covers publicly available package versions as released to the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (34)
  1. airflow/providers/amazon/__init__.py +4 -2
  2. airflow/providers/amazon/aws/hooks/base_aws.py +29 -12
  3. airflow/providers/amazon/aws/hooks/emr.py +17 -9
  4. airflow/providers/amazon/aws/hooks/eventbridge.py +27 -0
  5. airflow/providers/amazon/aws/hooks/redshift_data.py +10 -0
  6. airflow/providers/amazon/aws/hooks/sagemaker.py +24 -14
  7. airflow/providers/amazon/aws/notifications/chime.py +1 -1
  8. airflow/providers/amazon/aws/operators/eks.py +140 -7
  9. airflow/providers/amazon/aws/operators/emr.py +202 -22
  10. airflow/providers/amazon/aws/operators/eventbridge.py +87 -0
  11. airflow/providers/amazon/aws/operators/rds.py +120 -48
  12. airflow/providers/amazon/aws/operators/redshift_data.py +7 -0
  13. airflow/providers/amazon/aws/operators/sagemaker.py +75 -7
  14. airflow/providers/amazon/aws/operators/step_function.py +34 -2
  15. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +1 -1
  16. airflow/providers/amazon/aws/triggers/batch.py +1 -1
  17. airflow/providers/amazon/aws/triggers/ecs.py +7 -5
  18. airflow/providers/amazon/aws/triggers/eks.py +174 -3
  19. airflow/providers/amazon/aws/triggers/emr.py +215 -1
  20. airflow/providers/amazon/aws/triggers/rds.py +161 -5
  21. airflow/providers/amazon/aws/triggers/sagemaker.py +84 -1
  22. airflow/providers/amazon/aws/triggers/step_function.py +59 -0
  23. airflow/providers/amazon/aws/utils/__init__.py +16 -1
  24. airflow/providers/amazon/aws/utils/rds.py +2 -2
  25. airflow/providers/amazon/aws/waiters/sagemaker.json +46 -0
  26. airflow/providers/amazon/aws/waiters/stepfunctions.json +36 -0
  27. airflow/providers/amazon/get_provider_info.py +21 -1
  28. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/METADATA +11 -11
  29. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/RECORD +34 -30
  30. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/WHEEL +1 -1
  31. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/LICENSE +0 -0
  32. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/NOTICE +0 -0
  33. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/entry_points.txt +0 -0
  34. {apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/top_level.txt +0 -0
airflow/providers/amazon/aws/triggers/rds.py
@@ -16,19 +16,21 @@
  # under the License.
  from __future__ import annotations

+ import warnings
  from typing import Any

+ from airflow.exceptions import AirflowProviderDeprecationWarning
+ from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
  from airflow.providers.amazon.aws.hooks.rds import RdsHook
+ from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
+ from airflow.providers.amazon.aws.utils.rds import RdsDbType
  from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
  from airflow.triggers.base import BaseTrigger, TriggerEvent


  class RdsDbInstanceTrigger(BaseTrigger):
  """
- Trigger for RdsCreateDbInstanceOperator and RdsDeleteDbInstanceOperator.
-
- The trigger will asynchronously poll the boto3 API and wait for the
- DB instance to be in the state specified by the waiter.
+ Deprecated Trigger for RDS operations. Do not use.

  :param waiter_name: Name of the waiter to use, for instance 'db_instance_available'
  or 'db_instance_deleted'.
@@ -36,7 +38,7 @@ class RdsDbInstanceTrigger(BaseTrigger):
  :param waiter_delay: The amount of time in seconds to wait between attempts.
  :param waiter_max_attempts: The maximum number of attempts to be made.
  :param aws_conn_id: The Airflow connection used for AWS credentials.
- :param hook_params: The parameters to pass to the RdsHook.
+ :param region_name: AWS region where the DB is located, if different from the default one.
  :param response: The response from the RdsHook, to be passed back to the operator.
  """

@@ -50,6 +52,12 @@
  region_name: str | None,
  response: dict[str, Any],
  ):
+ warnings.warn(
+ "This trigger is deprecated, please use the other RDS triggers "
+ "such as RdsDbDeletedTrigger, RdsDbStoppedTrigger or RdsDbAvailableTrigger",
+ AirflowProviderDeprecationWarning,
+ stacklevel=2,
+ )
  self.db_instance_identifier = db_instance_identifier
  self.waiter_delay = waiter_delay
  self.waiter_max_attempts = waiter_max_attempts
@@ -87,3 +95,151 @@
  status_args=["DBInstances[0].DBInstanceStatus"],
  )
  yield TriggerEvent({"status": "success", "response": self.response})
+
+
+ _waiter_arg = {
+ RdsDbType.INSTANCE: "DBInstanceIdentifier",
+ RdsDbType.CLUSTER: "DBClusterIdentifier",
+ }
+ _status_paths = {
+ RdsDbType.INSTANCE: ["DBInstances[].DBInstanceStatus", "DBInstances[].StatusInfos"],
+ RdsDbType.CLUSTER: ["DBClusters[].Status"],
+ }
+
+
+ class RdsDbAvailableTrigger(AwsBaseWaiterTrigger):
+ """
+ Trigger to wait asynchronously for a DB instance or cluster to be available.
+
+ :param db_identifier: The DB identifier for the DB instance or cluster to be polled.
+ :param waiter_delay: The amount of time in seconds to wait between attempts.
+ :param waiter_max_attempts: The maximum number of attempts to be made.
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
+ :param region_name: AWS region where the DB is located, if different from the default one.
+ :param response: The response from the RdsHook, to be passed back to the operator.
+ :param db_type: The type of DB: instance or cluster.
+ """
+
+ def __init__(
+ self,
+ db_identifier: str,
+ waiter_delay: int,
+ waiter_max_attempts: int,
+ aws_conn_id: str,
+ response: dict[str, Any],
+ db_type: RdsDbType,
+ region_name: str | None = None,
+ ) -> None:
+ super().__init__(
+ serialized_fields={
+ "db_identifier": db_identifier,
+ "response": response,
+ "db_type": db_type,
+ },
+ waiter_name=f"db_{db_type.value}_available",
+ waiter_args={_waiter_arg[db_type]: db_identifier},
+ failure_message="Error while waiting for DB to be available",
+ status_message="DB initialization in progress",
+ status_queries=_status_paths[db_type],
+ return_key="response",
+ return_value=response,
+ waiter_delay=waiter_delay,
+ waiter_max_attempts=waiter_max_attempts,
+ aws_conn_id=aws_conn_id,
+ region_name=region_name,
+ )
+
+ def hook(self) -> AwsGenericHook:
+ return RdsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+
+ class RdsDbDeletedTrigger(AwsBaseWaiterTrigger):
+ """
+ Trigger to wait asynchronously for a DB instance or cluster to be deleted.
+
+ :param db_identifier: The DB identifier for the DB instance or cluster to be polled.
+ :param waiter_delay: The amount of time in seconds to wait between attempts.
+ :param waiter_max_attempts: The maximum number of attempts to be made.
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
+ :param region_name: AWS region where the DB is located, if different from the default one.
+ :param response: The response from the RdsHook, to be passed back to the operator.
+ :param db_type: The type of DB: instance or cluster.
+ """
+
+ def __init__(
+ self,
+ db_identifier: str,
+ waiter_delay: int,
+ waiter_max_attempts: int,
+ aws_conn_id: str,
+ response: dict[str, Any],
+ db_type: RdsDbType,
+ region_name: str | None = None,
+ ) -> None:
+ super().__init__(
+ serialized_fields={
+ "db_identifier": db_identifier,
+ "response": response,
+ "db_type": db_type,
+ },
+ waiter_name=f"db_{db_type.value}_deleted",
+ waiter_args={_waiter_arg[db_type]: db_identifier},
+ failure_message="Error while deleting DB",
+ status_message="DB deletion in progress",
+ status_queries=_status_paths[db_type],
+ return_key="response",
+ return_value=response,
+ waiter_delay=waiter_delay,
+ waiter_max_attempts=waiter_max_attempts,
+ aws_conn_id=aws_conn_id,
+ region_name=region_name,
+ )
+
+ def hook(self) -> AwsGenericHook:
+ return RdsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+
+ class RdsDbStoppedTrigger(AwsBaseWaiterTrigger):
+ """
+ Trigger to wait asynchronously for a DB instance or cluster to be stopped.
+
+ :param db_identifier: The DB identifier for the DB instance or cluster to be polled.
+ :param waiter_delay: The amount of time in seconds to wait between attempts.
+ :param waiter_max_attempts: The maximum number of attempts to be made.
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
+ :param region_name: AWS region where the DB is located, if different from the default one.
+ :param response: The response from the RdsHook, to be passed back to the operator.
+ :param db_type: The type of DB: instance or cluster.
+ """
+
+ def __init__(
+ self,
+ db_identifier: str,
+ waiter_delay: int,
+ waiter_max_attempts: int,
+ aws_conn_id: str,
+ response: dict[str, Any],
+ db_type: RdsDbType,
+ region_name: str | None = None,
+ ) -> None:
+ super().__init__(
+ serialized_fields={
+ "db_identifier": db_identifier,
+ "response": response,
+ "db_type": db_type,
+ },
+ waiter_name=f"db_{db_type.value}_stopped",
+ waiter_args={_waiter_arg[db_type]: db_identifier},
+ failure_message="Error while stopping DB",
+ status_message="DB is being stopped",
+ status_queries=_status_paths[db_type],
+ return_key="response",
+ return_value=response,
+ waiter_delay=waiter_delay,
+ waiter_max_attempts=waiter_max_attempts,
+ aws_conn_id=aws_conn_id,
+ region_name=region_name,
+ )
+
+ def hook(self) -> AwsGenericHook:
+ return RdsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
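
All three new trigger classes share the same constructor shape. A minimal, hypothetical sketch (not part of this diff) of how a deferrable RDS operator could hand off to RdsDbAvailableTrigger; the identifier and helper name below are placeholders:

    # Illustrative sketch only; "my-db-instance" and _defer_until_available are hypothetical.
    from airflow.providers.amazon.aws.triggers.rds import RdsDbAvailableTrigger
    from airflow.providers.amazon.aws.utils.rds import RdsDbType

    def _defer_until_available(operator, create_response: dict) -> None:
        # Suspend the operator; the triggerer resumes it via `execute_complete`
        # with the TriggerEvent payload once the waiter reports "available".
        operator.defer(
            trigger=RdsDbAvailableTrigger(
                db_identifier="my-db-instance",
                db_type=RdsDbType.INSTANCE,  # or RdsDbType.CLUSTER
                response=create_response,  # boto3 response handed back on success
                waiter_delay=30,
                waiter_max_attempts=60,
                aws_conn_id="aws_default",
            ),
            method_name="execute_complete",
        )
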
airflow/providers/amazon/aws/triggers/sagemaker.py
@@ -17,9 +17,15 @@

  from __future__ import annotations

+ import asyncio
+ from collections import Counter
+ from enum import IntEnum
  from functools import cached_property
- from typing import Any
+ from typing import Any, AsyncIterator

+ from botocore.exceptions import WaiterError
+
+ from airflow import AirflowException
  from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook
  from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
  from airflow.triggers.base import BaseTrigger, TriggerEvent
@@ -115,3 +121,80 @@ class SageMakerTrigger(BaseTrigger):
  status_args=[self._get_response_status_key(self.job_type)],
  )
  yield TriggerEvent({"status": "success", "message": "Job completed."})
+
+
+ class SageMakerPipelineTrigger(BaseTrigger):
+ """Trigger to wait for a sagemaker pipeline execution to finish."""
+
+ class Type(IntEnum):
+ """Type of waiter to use."""
+
+ COMPLETE = 1
+ STOPPED = 2
+
+ def __init__(
+ self,
+ waiter_type: Type,
+ pipeline_execution_arn: str,
+ waiter_delay: int,
+ waiter_max_attempts: int,
+ aws_conn_id: str,
+ ):
+ self.waiter_type = waiter_type
+ self.pipeline_execution_arn = pipeline_execution_arn
+ self.waiter_delay = waiter_delay
+ self.waiter_max_attempts = waiter_max_attempts
+ self.aws_conn_id = aws_conn_id
+
+ def serialize(self) -> tuple[str, dict[str, Any]]:
+ return (
+ self.__class__.__module__ + "." + self.__class__.__qualname__,
+ {
+ "waiter_type": self.waiter_type.value, # saving the int value here
+ "pipeline_execution_arn": self.pipeline_execution_arn,
+ "waiter_delay": self.waiter_delay,
+ "waiter_max_attempts": self.waiter_max_attempts,
+ "aws_conn_id": self.aws_conn_id,
+ },
+ )
+
+ _waiter_name = {
+ Type.COMPLETE: "PipelineExecutionComplete",
+ Type.STOPPED: "PipelineExecutionStopped",
+ }
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ attempts = 0
+ hook = SageMakerHook(aws_conn_id=self.aws_conn_id)
+ async with hook.async_conn as conn:
+ waiter = hook.get_waiter(self._waiter_name[self.waiter_type], deferrable=True, client=conn)
+ while attempts < self.waiter_max_attempts:
+ attempts = attempts + 1
+ try:
+ await waiter.wait(
+ PipelineExecutionArn=self.pipeline_execution_arn, WaiterConfig={"MaxAttempts": 1}
+ )
+ # we reach this point only if the waiter met a success criteria
+ yield TriggerEvent({"status": "success", "value": self.pipeline_execution_arn})
+ return
+ except WaiterError as error:
+ if "terminal failure" in str(error):
+ raise
+
+ self.log.info(
+ "Status of the pipeline execution: %s", error.last_response["PipelineExecutionStatus"]
+ )
+
+ res = await conn.list_pipeline_execution_steps(
+ PipelineExecutionArn=self.pipeline_execution_arn
+ )
+ count_by_state = Counter(s["StepStatus"] for s in res["PipelineExecutionSteps"])
+ running_steps = [
+ s["StepName"] for s in res["PipelineExecutionSteps"] if s["StepStatus"] == "Executing"
+ ]
+ self.log.info("State of the pipeline steps: %s", count_by_state)
+ self.log.info("Steps currently in progress: %s", running_steps)
+
+ await asyncio.sleep(int(self.waiter_delay))
+
+ raise AirflowException("Waiter error: max attempts reached")
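
SageMakerPipelineTrigger polls its custom waiter one attempt at a time so it can log step progress between polls. A minimal, hypothetical sketch (not part of this diff) of deferring to it; the ARN is a placeholder:

    # Illustrative sketch only; the pipeline execution ARN is a placeholder.
    from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerPipelineTrigger

    def _defer_until_pipeline_done(operator, pipeline_execution_arn: str) -> None:
        operator.defer(
            trigger=SageMakerPipelineTrigger(
                waiter_type=SageMakerPipelineTrigger.Type.COMPLETE,  # or Type.STOPPED
                pipeline_execution_arn=pipeline_execution_arn,
                waiter_delay=10,
                waiter_max_attempts=120,
                aws_conn_id="aws_default",
            ),
            method_name="execute_complete",  # resumed with the success TriggerEvent
        )
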
airflow/providers/amazon/aws/triggers/step_function.py (new file)
@@ -0,0 +1,59 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+ from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
+ from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
+
+
+ class StepFunctionsExecutionCompleteTrigger(AwsBaseWaiterTrigger):
+ """
+ Trigger to poll for the completion of a Step Functions execution.
+
+ :param execution_arn: ARN of the state machine to poll
+ :param waiter_delay: The amount of time in seconds to wait between attempts.
+ :param waiter_max_attempts: The maximum number of attempts to be made.
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
+ """
+
+ def __init__(
+ self,
+ *,
+ execution_arn: str,
+ waiter_delay: int = 60,
+ waiter_max_attempts: int = 30,
+ aws_conn_id: str | None = None,
+ region_name: str | None = None,
+ ) -> None:
+
+ super().__init__(
+ serialized_fields={"execution_arn": execution_arn, "region_name": region_name},
+ waiter_name="step_function_succeeded",
+ waiter_args={"executionArn": execution_arn},
+ failure_message="Step function failed",
+ status_message="Status of step function execution is",
+ status_queries=["status", "error", "cause"],
+ return_key="execution_arn",
+ return_value=execution_arn,
+ waiter_delay=waiter_delay,
+ waiter_max_attempts=waiter_max_attempts,
+ aws_conn_id=aws_conn_id,
+ )
+
+ def hook(self) -> AwsGenericHook:
+ return StepFunctionHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
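
The new Step Functions trigger takes keyword-only arguments and defaults to 60-second polls with 30 attempts. A minimal, hypothetical construction sketch (not part of this diff); the ARN and connection id are placeholders:

    # Illustrative sketch only; the execution ARN and connection id are placeholders.
    from airflow.providers.amazon.aws.triggers.step_function import StepFunctionsExecutionCompleteTrigger

    trigger = StepFunctionsExecutionCompleteTrigger(
        execution_arn="arn:aws:states:us-east-1:123456789012:execution:my-state-machine:my-run",
        aws_conn_id="aws_default",  # waiter_delay=60 and waiter_max_attempts=30 by default
    )
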
airflow/providers/amazon/aws/utils/__init__.py
@@ -21,13 +21,28 @@ import re
  from datetime import datetime
  from enum import Enum

+ from airflow.utils.helpers import prune_dict
  from airflow.version import version

  log = logging.getLogger(__name__)


  def trim_none_values(obj: dict):
- return {key: val for key, val in obj.items() if val is not None}
+ from packaging.version import Version
+
+ from airflow.version import version
+
+ if Version(version) < Version("2.7"):
+ # before version 2.7, the behavior is not the same.
+ # Empty dict and lists are removed from the given dict.
+ return {key: val for key, val in obj.items() if val is not None}
+ else:
+ # once airflow 2.6 rolls out of compatibility support for provider packages,
+ # we can replace usages of this method with the core one in our code,
+ # and uncomment this warning for users who may use it.
+ # warnings.warn("use airflow.utils.helpers.prune_dict() instead",
+ # AirflowProviderDeprecationWarning, stacklevel=2)
+ return prune_dict(obj)


  def datetime_to_epoch(date_time: datetime) -> int:
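
The practical difference between the two branches of trim_none_values: the pre-2.7 comprehension drops only None values, while prune_dict() (per the comment in the diff) also drops empty dicts and lists. A small illustrative example, assuming prune_dict() behaves as that comment describes:

    # Illustrative example only, assuming prune_dict() drops empty containers as well as None.
    payload = {"a": 1, "b": None, "c": {}, "d": []}

    # Airflow < 2.7 branch: only None is removed.
    assert {k: v for k, v in payload.items() if v is not None} == {"a": 1, "c": {}, "d": []}

    # Airflow >= 2.7 branch delegates to airflow.utils.helpers.prune_dict(payload),
    # which would additionally remove the empty dict and list, leaving {"a": 1}.
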
airflow/providers/amazon/aws/utils/rds.py
@@ -22,5 +22,5 @@ from enum import Enum
  class RdsDbType(Enum):
  """Only available types for the RDS."""

- INSTANCE: str = "instance"
- CLUSTER: str = "cluster"
+ INSTANCE = "instance"
+ CLUSTER = "cluster"
airflow/providers/amazon/aws/waiters/sagemaker.json
@@ -104,6 +104,52 @@
  "state": "failure"
  }
  ]
+ },
+ "PipelineExecutionComplete": {
+ "delay": 30,
+ "operation": "DescribePipelineExecution",
+ "maxAttempts": 60,
+ "description": "Wait until pipeline execution is Succeeded",
+ "acceptors": [
+ {
+ "matcher": "path",
+ "argument": "PipelineExecutionStatus",
+ "expected": "Succeeded",
+ "state": "success"
+ },
+ {
+ "matcher": "path",
+ "argument": "PipelineExecutionStatus",
+ "expected": "Failed",
+ "state": "failure"
+ },
+ {
+ "matcher": "path",
+ "argument": "PipelineExecutionStatus",
+ "expected": "Stopped",
+ "state": "failure"
+ }
+ ]
+ },
+ "PipelineExecutionStopped": {
+ "delay": 10,
+ "operation": "DescribePipelineExecution",
+ "maxAttempts": 120,
+ "description": "Wait until pipeline execution is Stopped",
+ "acceptors": [
+ {
+ "matcher": "path",
+ "argument": "PipelineExecutionStatus",
+ "expected": "Stopped",
+ "state": "success"
+ },
+ {
+ "matcher": "path",
+ "argument": "PipelineExecutionStatus",
+ "expected": "Failed",
+ "state": "failure"
+ }
+ ]
  }
  }
  }
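
These waiter definitions are looked up by name through the hook, as the SageMakerPipelineTrigger change above does with hook.get_waiter(..., deferrable=True, client=conn). A synchronous sketch (not part of this diff); the pipeline execution ARN is a placeholder:

    # Illustrative sketch only; the pipeline execution ARN is a placeholder.
    from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook

    hook = SageMakerHook(aws_conn_id="aws_default")
    waiter = hook.get_waiter("PipelineExecutionComplete")
    waiter.wait(
        PipelineExecutionArn="arn:aws:sagemaker:us-east-1:123456789012:pipeline/demo/execution/abc123",
        WaiterConfig={"Delay": 30, "MaxAttempts": 60},
    )
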
airflow/providers/amazon/aws/waiters/stepfunctions.json (new file)
@@ -0,0 +1,36 @@
+ {
+ "version": 2,
+ "waiters": {
+ "step_function_succeeded": {
+ "operation": "DescribeExecution",
+ "delay": 30,
+ "maxAttempts": 60,
+ "acceptors": [
+ {
+ "matcher": "path",
+ "argument": "status",
+ "expected": "SUCCEEDED",
+ "state": "success"
+ },
+ {
+ "matcher": "error",
+ "argument": "status",
+ "expected": "RUNNING",
+ "state": "retry"
+ },
+ {
+ "matcher": "path",
+ "argument": "status",
+ "expected": "FAILED",
+ "state": "failure"
+ },
+ {
+ "matcher": "path",
+ "argument": "status",
+ "expected": "ABORTED",
+ "state": "failure"
+ }
+ ]
+ }
+ }
+ }
airflow/providers/amazon/get_provider_info.py
@@ -29,6 +29,7 @@ def get_provider_info():
  "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
  "suspended": False,
  "versions": [
+ "8.4.0",
  "8.3.1",
  "8.3.0",
  "8.2.0",
@@ -78,7 +79,7 @@ def get_provider_info():
  "sqlalchemy_redshift>=0.8.6",
  "mypy-boto3-rds>=1.24.0",
  "mypy-boto3-redshift-data>=1.24.0",
- "mypy-boto3-appflow>=1.24.0",
+ "mypy-boto3-appflow>=1.24.0,<1.28.12",
  "asgiref",
  "mypy-boto3-s3>=1.24.0",
  ],
@@ -175,6 +176,13 @@ def get_provider_info():
  "logo": "/integration-logos/aws/Amazon-EMR_light-bg@4x.png",
  "tags": ["aws"],
  },
+ {
+ "integration-name": "Amazon EventBridge",
+ "external-doc-url": "https://docs.aws.amazon.com/eventbridge/latest/APIReference/Welcome.html",
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/eventbridge.rst"],
+ "logo": "/integration-logos/aws/Amazon-EventBridge_64.png",
+ "tags": ["aws"],
+ },
  {
  "integration-name": "Amazon Glacier",
  "external-doc-url": "https://aws.amazon.com/glacier/",
@@ -370,6 +378,10 @@ def get_provider_info():
  "integration-name": "Amazon EMR on EKS",
  "python-modules": ["airflow.providers.amazon.aws.operators.emr"],
  },
+ {
+ "integration-name": "Amazon EventBridge",
+ "python-modules": ["airflow.providers.amazon.aws.operators.eventbridge"],
+ },
  {
  "integration-name": "Amazon Glacier",
  "python-modules": ["airflow.providers.amazon.aws.operators.glacier"],
@@ -566,6 +578,10 @@ def get_provider_info():
  "integration-name": "Amazon EMR on EKS",
  "python-modules": ["airflow.providers.amazon.aws.hooks.emr"],
  },
+ {
+ "integration-name": "Amazon EventBridge",
+ "python-modules": ["airflow.providers.amazon.aws.hooks.eventbridge"],
+ },
  {
  "integration-name": "Amazon Glacier",
  "python-modules": ["airflow.providers.amazon.aws.hooks.glacier"],
@@ -696,6 +712,10 @@ def get_provider_info():
  "integration-name": "Amazon RDS",
  "python-modules": ["airflow.providers.amazon.aws.triggers.rds"],
  },
+ {
+ "integration-name": "AWS Step Functions",
+ "python-modules": ["airflow.providers.amazon.aws.triggers.step_function"],
+ },
  ],
  "transfers": [
  {
{apache_airflow_providers_amazon-8.3.1.dist-info → apache_airflow_providers_amazon-8.4.0.dist-info}/METADATA
@@ -1,14 +1,14 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-amazon
- Version: 8.3.1
+ Version: 8.4.0
  Summary: Provider for Apache Airflow. Implements apache-airflow-providers-amazon package
  Home-page: https://airflow.apache.org/
  Download-URL: https://archive.apache.org/dist/airflow/providers
  Author: Apache Software Foundation
  Author-email: dev@airflow.apache.org
  License: Apache License 2.0
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.3.1/
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.3.1/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.4.0/
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.4.0/changelog.html
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -37,7 +37,7 @@ Requires-Dist: apache-airflow (>=2.4.0)
  Requires-Dist: asgiref
  Requires-Dist: boto3 (>=1.24.0)
  Requires-Dist: jsonpath-ng (>=1.5.3)
- Requires-Dist: mypy-boto3-appflow (>=1.24.0)
+ Requires-Dist: mypy-boto3-appflow (<1.28.12,>=1.24.0)
  Requires-Dist: mypy-boto3-rds (>=1.24.0)
  Requires-Dist: mypy-boto3-redshift-data (>=1.24.0)
  Requires-Dist: mypy-boto3-s3 (>=1.24.0)
@@ -111,7 +111,7 @@ Requires-Dist: apache-airflow-providers-ssh ; extra == 'ssh'

  Package ``apache-airflow-providers-amazon``

- Release: ``8.3.1``
+ Release: ``8.4.0``


  Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -124,7 +124,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
  are in ``airflow.providers.amazon`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.3.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.4.0/>`_.


  Installation
@@ -139,9 +139,9 @@ The package supports the following python versions: 3.8,3.9,3.10,3.11
  Requirements
  ------------

- ======================================= ==================
+ ======================================= =====================
  PIP package Version required
- ======================================= ==================
+ ======================================= =====================
  ``apache-airflow`` ``>=2.4.0``
  ``apache-airflow-providers-common-sql`` ``>=1.3.1``
  ``apache-airflow-providers-http``
@@ -153,10 +153,10 @@ PIP package Version required
  ``sqlalchemy_redshift`` ``>=0.8.6``
  ``mypy-boto3-rds`` ``>=1.24.0``
  ``mypy-boto3-redshift-data`` ``>=1.24.0``
- ``mypy-boto3-appflow`` ``>=1.24.0``
+ ``mypy-boto3-appflow`` ``>=1.24.0,<1.28.12``
  ``asgiref``
  ``mypy-boto3-s3`` ``>=1.24.0``
- ======================================= ==================
+ ======================================= =====================

  Cross provider package dependencies
  -----------------------------------
@@ -189,4 +189,4 @@ Dependent package
  ====================================================================================================================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.3.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.4.0/changelog.html>`_.