apache-airflow-providers-amazon 9.5.0rc3__py3-none-any.whl → 9.6.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
@@ -17,36 +17,30 @@
 from __future__ import annotations
 
 from collections.abc import Sequence
-from functools import cached_property
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.providers.amazon.aws.hooks.rds import RdsHook
+from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
+from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.rds import RdsDbType
-from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-class RdsBaseSensor(BaseSensorOperator):
+class RdsBaseSensor(AwsBaseSensor[RdsHook]):
     """Base operator that implements common functions for all sensors."""
 
+    aws_hook_class = RdsHook
     ui_color = "#ddbb77"
     ui_fgcolor = "#ffffff"
 
-    def __init__(
-        self, *args, aws_conn_id: str | None = "aws_conn_id", hook_params: dict | None = None, **kwargs
-    ):
+    def __init__(self, *args, hook_params: dict | None = None, **kwargs):
         self.hook_params = hook_params or {}
-        self.aws_conn_id = aws_conn_id
         self.target_statuses: list[str] = []
         super().__init__(*args, **kwargs)
 
-    @cached_property
-    def hook(self):
-        return RdsHook(aws_conn_id=self.aws_conn_id, **self.hook_params)
-
 
 class RdsSnapshotExistenceSensor(RdsBaseSensor):
     """
@@ -59,9 +53,19 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):
     :param db_type: Type of the DB - either "instance" or "cluster"
     :param db_snapshot_identifier: The identifier for the DB snapshot
     :param target_statuses: Target status of snapshot
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = (
+    template_fields: Sequence[str] = aws_template_fields(
         "db_snapshot_identifier",
         "target_statuses",
     )
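
Taken together, the hunks above switch the RDS sensors onto the provider's shared base-sensor pattern: a subclass declares aws_hook_class and inherits connection handling (aws_conn_id, region_name, verify, botocore_config) plus a lazily built hook from AwsBaseSensor, instead of constructing an RdsHook itself. A minimal sketch of that pattern with a hypothetical sensor (class name, field, and status check invented for illustration):

    from airflow.providers.amazon.aws.hooks.rds import RdsHook
    from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
    from airflow.providers.amazon.aws.utils.mixins import aws_template_fields


    class MyRdsAvailableSensor(AwsBaseSensor[RdsHook]):
        """Hypothetical sensor: pokes until a DB instance reports the expected status."""

        aws_hook_class = RdsHook  # the base class builds the hook from aws_conn_id, region_name, etc.
        template_fields = aws_template_fields("db_identifier")

        def __init__(self, *, db_identifier: str, **kwargs):
            super().__init__(**kwargs)
            self.db_identifier = db_identifier

        def poke(self, context) -> bool:
            # self.hook is the RdsHook provided by AwsBaseSensor; .conn is the boto3 "rds" client.
            response = self.hook.conn.describe_db_instances(DBInstanceIdentifier=self.db_identifier)
            return response["DBInstances"][0]["DBInstanceStatus"] == "available"
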
@@ -72,10 +76,9 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):
         db_type: str,
         db_snapshot_identifier: str,
         target_statuses: list[str] | None = None,
-        aws_conn_id: str | None = "aws_conn_id",
         **kwargs,
     ):
-        super().__init__(aws_conn_id=aws_conn_id, **kwargs)
+        super().__init__(**kwargs)
         self.db_type = RdsDbType(db_type)
         self.db_snapshot_identifier = db_snapshot_identifier
         self.target_statuses = target_statuses or ["available"]
@@ -107,7 +110,9 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):
     :param error_statuses: Target error status of export task to fail the sensor
     """
 
-    template_fields: Sequence[str] = ("export_task_identifier", "target_statuses", "error_statuses")
+    template_fields: Sequence[str] = aws_template_fields(
+        "export_task_identifier", "target_statuses", "error_statuses"
+    )
 
     def __init__(
         self,
@@ -115,10 +120,9 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):
         export_task_identifier: str,
         target_statuses: list[str] | None = None,
         error_statuses: list[str] | None = None,
-        aws_conn_id: str | None = "aws_default",
         **kwargs,
     ):
-        super().__init__(aws_conn_id=aws_conn_id, **kwargs)
+        super().__init__(**kwargs)
 
         self.export_task_identifier = export_task_identifier
         self.target_statuses = target_statuses or [
@@ -159,7 +163,7 @@ class RdsDbSensor(RdsBaseSensor):
     :param target_statuses: Target status of DB
     """
 
-    template_fields: Sequence[str] = (
+    template_fields: Sequence[str] = aws_template_fields(
         "db_identifier",
         "db_type",
         "target_statuses",
@@ -171,10 +175,9 @@ class RdsDbSensor(RdsBaseSensor):
         db_identifier: str,
         db_type: RdsDbType | str = RdsDbType.INSTANCE,
         target_statuses: list[str] | None = None,
-        aws_conn_id: str | None = "aws_default",
         **kwargs,
     ):
-        super().__init__(aws_conn_id=aws_conn_id, **kwargs)
+        super().__init__(**kwargs)
         self.db_identifier = db_identifier
         self.target_statuses = target_statuses or ["available"]
         self.db_type = db_type
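
From a DAG author's perspective the RDS sensors keep their existing arguments, while aws_conn_id, region_name, verify, and botocore_config are now handled by the shared AwsBaseSensor base class. A minimal usage sketch (connection id and snapshot identifier are placeholders):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor

    with DAG("rds_snapshot_example", start_date=datetime(2025, 1, 1), schedule=None):
        wait_for_snapshot = RdsSnapshotExistenceSensor(
            task_id="wait_for_snapshot",
            db_type="instance",
            db_snapshot_identifier="my-db-snapshot",  # placeholder identifier
            target_statuses=["available"],
            aws_conn_id="aws_default",  # handled by AwsBaseSensor
            region_name="us-east-1",    # handled by AwsBaseSensor
        )
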
@@ -20,6 +20,7 @@ from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.bedrock import BedrockAgentHook, BedrockHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
+from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
@@ -182,3 +183,100 @@ class BedrockIngestionJobTrigger(AwsBaseWaiterTrigger):
 
     def hook(self) -> AwsGenericHook:
         return BedrockAgentHook(aws_conn_id=self.aws_conn_id)
+
+
+class BedrockBaseBatchInferenceTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when a batch inference job is complete.
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
+
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        waiter_name: str | ArgNotSet = NOTSET,  # This must be defined in the child class.
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        if waiter_name == NOTSET:
+            raise NotImplementedError("Triggers must provide a waiter name.")
+
+        super().__init__(
+            serialized_fields={"job_arn": job_arn},
+            waiter_name=str(waiter_name),  # Cast a string to a string to make mypy happy
+            waiter_args={"jobIdentifier": job_arn},
+            failure_message="Bedrock batch inference job failed.",
+            status_message="Status of Bedrock batch inference job is",
+            status_queries=["status"],
+            return_key="job_arn",
+            return_value=job_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return BedrockHook(aws_conn_id=self.aws_conn_id)
+
+
+class BedrockBatchInferenceCompletedTrigger(BedrockBaseBatchInferenceTrigger):
+    """
+    Trigger when a batch inference job is complete.
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
+
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        super().__init__(
+            waiter_name="batch_inference_complete",
+            job_arn=job_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
+
+
+class BedrockBatchInferenceScheduledTrigger(BedrockBaseBatchInferenceTrigger):
+    """
+    Trigger when a batch inference job is scheduled.
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
+
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        super().__init__(
+            waiter_name="batch_inference_scheduled",
+            job_arn=job_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
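
These triggers are meant to be handed to the triggerer by a deferrable operator. A minimal sketch, assuming a hypothetical custom operator built on Airflow's standard self.defer() mechanism (any provider-shipped Bedrock operators are outside this diff):

    from __future__ import annotations

    from airflow.models.baseoperator import BaseOperator
    from airflow.providers.amazon.aws.triggers.bedrock import BedrockBatchInferenceCompletedTrigger


    class WaitForBedrockBatchInference(BaseOperator):
        """Hypothetical operator that defers until a Bedrock batch inference job completes."""

        def __init__(self, *, job_arn: str, aws_conn_id: str | None = "aws_default", **kwargs):
            super().__init__(**kwargs)
            self.job_arn = job_arn
            self.aws_conn_id = aws_conn_id

        def execute(self, context):
            # Hand the wait off to the triggerer instead of blocking a worker slot.
            self.defer(
                trigger=BedrockBatchInferenceCompletedTrigger(
                    job_arn=self.job_arn,
                    aws_conn_id=self.aws_conn_id,
                ),
                method_name="execute_complete",
            )

        def execute_complete(self, context, event=None):
            # On success the trigger reports the job ARN under the "job_arn" return key set above.
            return event["job_arn"] if event else None
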
@@ -23,7 +23,7 @@ import time
 from typing import TYPE_CHECKING, Any
 
 import jmespath
-from botocore.exceptions import WaiterError
+from botocore.exceptions import NoCredentialsError, WaiterError
 
 from airflow.exceptions import AirflowException
 
@@ -70,6 +70,10 @@ def wait(
             time.sleep(waiter_delay)
         try:
             waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
+
+        except NoCredentialsError as error:
+            log.info(str(error))
+
         except WaiterError as error:
             error_reason = str(error)
             last_response = error.last_response
@@ -131,6 +135,10 @@ async def async_wait(
             await asyncio.sleep(waiter_delay)
         try:
             await waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
+
+        except NoCredentialsError as error:
+            log.info(str(error))
+
         except WaiterError as error:
             error_reason = str(error)
             last_response = error.last_response
@@ -68,6 +68,140 @@
                     "state": "failure"
                 }
             ]
+        },
+        "batch_inference_complete": {
+            "delay": 120,
+            "maxAttempts": 75,
+            "operation": "GetModelInvocationJob",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Completed",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Failed",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopped",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "PartiallyCompleted",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Expired",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopping",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Submitted",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "InProgress",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Validating",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Scheduled",
+                    "state": "retry"
+                }
+            ]
+        },
+        "batch_inference_scheduled": {
+            "delay": 120,
+            "maxAttempts": 75,
+            "operation": "GetModelInvocationJob",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Completed",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Failed",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopped",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopping",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "PartiallyCompleted",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Expired",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Submitted",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "InProgress",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Validating",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Scheduled",
+                    "state": "success"
+                }
+            ]
         }
     }
 }
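
For reference, the acceptor tables above use botocore's custom-waiter JSON format, so the same definition can be driven directly with a boto3 "bedrock" client. A trimmed sketch under that assumption, with only a few acceptors repeated and a placeholder job ARN:

    import boto3
    from botocore.waiter import WaiterModel, create_waiter_with_client

    # Trimmed copy of the "batch_inference_complete" definition above.
    waiter_config = {
        "version": 2,
        "waiters": {
            "batch_inference_complete": {
                "delay": 120,
                "maxAttempts": 75,
                "operation": "GetModelInvocationJob",
                "acceptors": [
                    {"matcher": "path", "argument": "status", "expected": "Completed", "state": "success"},
                    {"matcher": "path", "argument": "status", "expected": "Failed", "state": "failure"},
                    {"matcher": "path", "argument": "status", "expected": "InProgress", "state": "retry"},
                ],
            }
        },
    }

    client = boto3.client("bedrock")
    waiter = create_waiter_with_client("batch_inference_complete", WaiterModel(waiter_config), client)
    # jobIdentifier matches the waiter_args used by the triggers; the ARN below is a placeholder.
    waiter.wait(jobIdentifier="arn:aws:bedrock:us-east-1:123456789012:model-invocation-job/example")
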
@@ -26,79 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-amazon",
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
-        "state": "ready",
-        "source-date-epoch": 1743477760,
-        "versions": [
-            "9.5.0",
-            "9.4.0",
-            "9.2.0",
-            "9.1.0",
-            "9.0.0",
-            "8.29.0",
-            "8.28.0",
-            "8.27.0",
-            "8.26.0",
-            "8.25.0",
-            "8.24.0",
-            "8.23.0",
-            "8.22.0",
-            "8.21.0",
-            "8.20.0",
-            "8.19.0",
-            "8.18.0",
-            "8.17.0",
-            "8.16.0",
-            "8.15.0",
-            "8.14.0",
-            "8.13.0",
-            "8.12.0",
-            "8.11.0",
-            "8.10.0",
-            "8.9.0",
-            "8.8.0",
-            "8.7.1",
-            "8.7.0",
-            "8.6.0",
-            "8.5.1",
-            "8.5.0",
-            "8.4.0",
-            "8.3.1",
-            "8.3.0",
-            "8.2.0",
-            "8.1.0",
-            "8.0.0",
-            "7.4.1",
-            "7.4.0",
-            "7.3.0",
-            "7.2.1",
-            "7.2.0",
-            "7.1.0",
-            "7.0.0",
-            "6.2.0",
-            "6.1.0",
-            "6.0.0",
-            "5.1.0",
-            "5.0.0",
-            "4.1.0",
-            "4.0.0",
-            "3.4.0",
-            "3.3.0",
-            "3.2.0",
-            "3.1.1",
-            "3.0.0",
-            "2.6.0",
-            "2.5.0",
-            "2.4.0",
-            "2.3.0",
-            "2.2.0",
-            "2.1.0",
-            "2.0.0",
-            "1.4.0",
-            "1.3.0",
-            "1.2.0",
-            "1.1.0",
-            "1.0.0",
-        ],
         "integrations": [
             {
                 "integration-name": "Amazon Athena",
@@ -1370,55 +1297,4 @@ def get_provider_info():
             },
         },
         "executors": ["airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-compat>=1.6.0",
-            "apache-airflow-providers-common-sql>=1.20.0",
-            "apache-airflow-providers-http",
-            "boto3>=1.37.0",
-            "botocore>=1.34.90",
-            "inflection>=0.5.1",
-            "watchtower>=3.0.0,!=3.3.0,<4",
-            "jsonpath_ng>=1.5.3",
-            "redshift_connector>=2.0.918",
-            "asgiref>=2.3.0",
-            "PyAthena>=3.0.10",
-            "jmespath>=0.7.0",
-            "python3-saml>=1.16.0",
-            "xmlsec!=1.3.15,>=1.3.14",
-            "sagemaker-studio>=1.0.9",
-        ],
-        "optional-dependencies": {
-            "pandas": ["pandas>=2.1.2,<2.2"],
-            "aiobotocore": ["aiobotocore[boto3]>=2.13.0"],
-            "cncf.kubernetes": ["apache-airflow-providers-cncf-kubernetes>=7.2.0"],
-            "s3fs": ["s3fs>=2023.10.0"],
-            "python3-saml": ["python3-saml>=1.16.0"],
-            "apache.hive": ["apache-airflow-providers-apache-hive"],
-            "exasol": ["apache-airflow-providers-exasol"],
-            "fab": ["apache-airflow-providers-fab"],
-            "ftp": ["apache-airflow-providers-ftp"],
-            "google": ["apache-airflow-providers-google"],
-            "imap": ["apache-airflow-providers-imap"],
-            "microsoft.azure": ["apache-airflow-providers-microsoft-azure"],
-            "mongo": ["apache-airflow-providers-mongo"],
-            "openlineage": ["apache-airflow-providers-openlineage"],
-            "salesforce": ["apache-airflow-providers-salesforce"],
-            "ssh": ["apache-airflow-providers-ssh"],
-            "standard": ["apache-airflow-providers-standard"],
-        },
-        "devel-dependencies": [
-            "aiobotocore>=2.13.0",
-            "aws_xray_sdk>=2.12.0",
-            "moto[cloudformation,glue]>=5.1.2",
-            "mypy-boto3-appflow>=1.37.0",
-            "mypy-boto3-rds>=1.34.90",
-            "mypy-boto3-redshift-data>=1.34.0",
-            "mypy-boto3-s3>=1.34.90",
-            "s3fs>=2023.10.0",
-            "openapi-schema-validator>=0.6.2",
-            "openapi-spec-validator>=0.7.1",
-            "opensearch-py>=2.2.0",
-            "responses>=0.25.0",
-        ],
     }
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.5.0rc3
+Version: 9.6.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,23 +20,23 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-compat>=1.6.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: boto3>=1.37.0
-Requires-Dist: botocore>=1.34.90
+Requires-Dist: botocore>=1.37.0
 Requires-Dist: inflection>=0.5.1
-Requires-Dist: watchtower>=3.0.0,!=3.3.0,<4
+Requires-Dist: watchtower>=3.3.1,<4
 Requires-Dist: jsonpath_ng>=1.5.3
-Requires-Dist: redshift_connector>=2.0.918
+Requires-Dist: redshift_connector>=2.1.3
 Requires-Dist: asgiref>=2.3.0
-Requires-Dist: PyAthena>=3.0.10
+Requires-Dist: PyAthena>=3.10.0
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: python3-saml>=1.16.0
 Requires-Dist: xmlsec!=1.3.15,>=1.3.14
 Requires-Dist: sagemaker-studio>=1.0.9
-Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
+Requires-Dist: aiobotocore[boto3]>=2.20.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
@@ -54,8 +54,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -103,7 +103,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.5.0``
+Release: ``9.6.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -116,7 +116,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0/>`_.
 
 Installation
 ------------
@@ -130,26 +130,26 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
 Requirements
 ------------
 
-========================================== ======================
+========================================== =====================
 PIP package                                Version required
-========================================== ======================
+========================================== =====================
 ``apache-airflow``                         ``>=2.9.0``
 ``apache-airflow-providers-common-compat`` ``>=1.6.0``
 ``apache-airflow-providers-common-sql``    ``>=1.20.0``
 ``apache-airflow-providers-http``
 ``boto3``                                  ``>=1.37.0``
-``botocore``                               ``>=1.34.90``
+``botocore``                               ``>=1.37.0``
 ``inflection``                             ``>=0.5.1``
-``watchtower``                             ``>=3.0.0,!=3.3.0,<4``
+``watchtower``                             ``>=3.3.1,<4``
 ``jsonpath_ng``                            ``>=1.5.3``
-``redshift_connector``                     ``>=2.0.918``
+``redshift_connector``                     ``>=2.1.3``
 ``asgiref``                                ``>=2.3.0``
-``PyAthena``                               ``>=3.0.10``
+``PyAthena``                               ``>=3.10.0``
 ``jmespath``                               ``>=0.7.0``
 ``python3-saml``                           ``>=1.16.0``
 ``xmlsec``                                 ``>=1.3.14,!=1.3.15``
 ``sagemaker-studio``                       ``>=1.0.9``
-========================================== ======================
+========================================== =====================
 
 Cross provider package dependencies
 -----------------------------------
@@ -184,5 +184,5 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0/changelog.html>`_.
 