apache-airflow-providers-amazon 9.5.0rc2__py3-none-any.whl → 9.6.0__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (39)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +13 -15
  3. airflow/providers/amazon/aws/auth_manager/router/login.py +4 -2
  4. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +53 -1
  5. airflow/providers/amazon/aws/hooks/athena_sql.py +2 -2
  6. airflow/providers/amazon/aws/hooks/batch_client.py +1 -2
  7. airflow/providers/amazon/aws/hooks/batch_waiters.py +11 -3
  8. airflow/providers/amazon/aws/hooks/dms.py +3 -1
  9. airflow/providers/amazon/aws/hooks/glue.py +17 -2
  10. airflow/providers/amazon/aws/hooks/mwaa.py +1 -1
  11. airflow/providers/amazon/aws/hooks/redshift_cluster.py +9 -9
  12. airflow/providers/amazon/aws/hooks/redshift_data.py +1 -2
  13. airflow/providers/amazon/aws/hooks/s3.py +0 -4
  14. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
  15. airflow/providers/amazon/aws/links/athena.py +1 -2
  16. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +174 -54
  17. airflow/providers/amazon/aws/log/s3_task_handler.py +123 -86
  18. airflow/providers/amazon/aws/operators/bedrock.py +119 -0
  19. airflow/providers/amazon/aws/operators/ec2.py +1 -1
  20. airflow/providers/amazon/aws/operators/eks.py +3 -3
  21. airflow/providers/amazon/aws/operators/rds.py +83 -18
  22. airflow/providers/amazon/aws/operators/redshift_cluster.py +10 -3
  23. airflow/providers/amazon/aws/operators/sagemaker.py +3 -5
  24. airflow/providers/amazon/aws/sensors/bedrock.py +110 -0
  25. airflow/providers/amazon/aws/sensors/glacier.py +1 -1
  26. airflow/providers/amazon/aws/sensors/mwaa.py +2 -1
  27. airflow/providers/amazon/aws/sensors/rds.py +23 -20
  28. airflow/providers/amazon/aws/sensors/s3.py +1 -1
  29. airflow/providers/amazon/aws/sensors/step_function.py +2 -1
  30. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +2 -2
  31. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  32. airflow/providers/amazon/aws/triggers/bedrock.py +98 -0
  33. airflow/providers/amazon/aws/utils/waiter_with_logging.py +9 -1
  34. airflow/providers/amazon/aws/waiters/bedrock.json +134 -0
  35. airflow/providers/amazon/get_provider_info.py +0 -124
  36. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0.dist-info}/METADATA +21 -21
  37. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0.dist-info}/RECORD +39 -39
  38. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0.dist-info}/WHEEL +1 -1
  39. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/triggers/bedrock.py

@@ -20,6 +20,7 @@ from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.bedrock import BedrockAgentHook, BedrockHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
+from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
@@ -182,3 +183,100 @@ class BedrockIngestionJobTrigger(AwsBaseWaiterTrigger):
 
     def hook(self) -> AwsGenericHook:
         return BedrockAgentHook(aws_conn_id=self.aws_conn_id)
+
+
+class BedrockBaseBatchInferenceTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when a batch inference job is complete.
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
+
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        waiter_name: str | ArgNotSet = NOTSET,  # This must be defined in the child class.
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        if waiter_name == NOTSET:
+            raise NotImplementedError("Triggers must provide a waiter name.")
+
+        super().__init__(
+            serialized_fields={"job_arn": job_arn},
+            waiter_name=str(waiter_name),  # Cast a string to a string to make mypy happy
+            waiter_args={"jobIdentifier": job_arn},
+            failure_message="Bedrock batch inference job failed.",
+            status_message="Status of Bedrock batch inference job is",
+            status_queries=["status"],
+            return_key="job_arn",
+            return_value=job_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return BedrockHook(aws_conn_id=self.aws_conn_id)
+
+
+class BedrockBatchInferenceCompletedTrigger(BedrockBaseBatchInferenceTrigger):
+    """
+    Trigger when a batch inference job is complete.
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
+
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        super().__init__(
+            waiter_name="batch_inference_complete",
+            job_arn=job_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
+
+
+class BedrockBatchInferenceScheduledTrigger(BedrockBaseBatchInferenceTrigger):
+    """
+    Trigger when a batch inference job is scheduled.
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
+
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        waiter_delay: int = 120,
+        waiter_max_attempts: int = 75,
+        aws_conn_id: str | None = None,
+    ) -> None:
+        super().__init__(
+            waiter_name="batch_inference_scheduled",
+            job_arn=job_arn,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            aws_conn_id=aws_conn_id,
+        )
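The two concrete triggers above are meant to be handed to Airflow's deferral machinery. Below is a minimal sketch of that usage, not code from this release: the operator class and its `execute_complete` wiring are hypothetical examples, while `BaseOperator.defer` and the trigger's keyword arguments come from the diff above.

    from __future__ import annotations

    from airflow.models.baseoperator import BaseOperator
    from airflow.providers.amazon.aws.triggers.bedrock import BedrockBatchInferenceCompletedTrigger


    class WaitForBatchInferenceOperator(BaseOperator):  # hypothetical example operator
        def __init__(self, *, job_arn: str, aws_conn_id: str | None = "aws_default", **kwargs):
            super().__init__(**kwargs)
            self.job_arn = job_arn
            self.aws_conn_id = aws_conn_id

        def execute(self, context):
            # Free the worker slot and let the triggerer poll the
            # batch_inference_complete waiter until the job finishes.
            self.defer(
                trigger=BedrockBatchInferenceCompletedTrigger(
                    job_arn=self.job_arn,
                    aws_conn_id=self.aws_conn_id,
                ),
                method_name="execute_complete",
            )

        def execute_complete(self, context, event=None):
            # The trigger serializes the job ARN under return_key="job_arn".
            return event["job_arn"] if event else None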
airflow/providers/amazon/aws/utils/waiter_with_logging.py

@@ -23,7 +23,7 @@ import time
 from typing import TYPE_CHECKING, Any
 
 import jmespath
-from botocore.exceptions import WaiterError
+from botocore.exceptions import NoCredentialsError, WaiterError
 
 from airflow.exceptions import AirflowException
 
@@ -70,6 +70,10 @@ def wait(
         time.sleep(waiter_delay)
         try:
             waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
+
+        except NoCredentialsError as error:
+            log.info(str(error))
+
         except WaiterError as error:
             error_reason = str(error)
             last_response = error.last_response
@@ -131,6 +135,10 @@ async def async_wait(
         await asyncio.sleep(waiter_delay)
         try:
             await waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
+
+        except NoCredentialsError as error:
+            log.info(str(error))
+
         except WaiterError as error:
             error_reason = str(error)
             last_response = error.last_response
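The net effect of this change is that a `NoCredentialsError` raised during a single poll (for example, while credentials are being rotated) is logged at INFO level and retried on the next attempt instead of aborting the loop. A standalone sketch of that control flow follows; `poll_with_retries` is an illustrative name, not the provider's function, and the substring check for terminal failure is a simplification of the provider's jmespath-based handling:

    import logging
    import time

    from botocore.exceptions import NoCredentialsError, WaiterError

    log = logging.getLogger(__name__)


    def poll_with_retries(waiter, args: dict, max_attempts: int, delay: int) -> None:
        for _ in range(max_attempts):
            time.sleep(delay)
            try:
                waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
                return  # a success acceptor matched
            except NoCredentialsError as error:
                # Transient credential gap: log and fall through to the next attempt.
                log.info(str(error))
            except WaiterError as error:
                if "terminal failure" in str(error):
                    raise  # a failure acceptor matched
                # Otherwise the job is still in progress; keep polling.
        raise RuntimeError("Waiter never reached a terminal state.")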
airflow/providers/amazon/aws/waiters/bedrock.json

@@ -68,6 +68,140 @@
                     "state": "failure"
                 }
             ]
+        },
+        "batch_inference_complete": {
+            "delay": 120,
+            "maxAttempts": 75,
+            "operation": "GetModelInvocationJob",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Completed",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Failed",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopped",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "PartiallyCompleted",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Expired",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopping",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Submitted",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "InProgress",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Validating",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Scheduled",
+                    "state": "retry"
+                }
+            ]
+        },
+        "batch_inference_scheduled": {
+            "delay": 120,
+            "maxAttempts": 75,
+            "operation": "GetModelInvocationJob",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Completed",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Failed",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopped",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Stopping",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "PartiallyCompleted",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Expired",
+                    "state": "failure"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Submitted",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "InProgress",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Validating",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "status",
+                    "expected": "Scheduled",
+                    "state": "success"
+                }
+            ]
         }
     }
 }
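These entries follow botocore's custom-waiter JSON schema; the provider loads them through its hooks' `get_waiter` machinery. As an illustration only, the same definition can be driven directly with botocore; the file path and job ARN below are placeholders:

    import json

    import boto3
    from botocore.waiter import WaiterModel, create_waiter_with_client

    client = boto3.client("bedrock")

    # Load the waiter definitions shipped with the provider (path is illustrative).
    with open("airflow/providers/amazon/aws/waiters/bedrock.json") as f:
        model = WaiterModel(json.load(f))

    # Polls GetModelInvocationJob every 120s until "Completed" (success) or a
    # failure acceptor such as Failed/Stopped/Expired matches.
    waiter = create_waiter_with_client("batch_inference_complete", model, client)
    waiter.wait(jobIdentifier="arn:aws:bedrock:us-east-1:111122223333:model-invocation-job/example")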
airflow/providers/amazon/get_provider_info.py

@@ -26,79 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-amazon",
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
-        "state": "ready",
-        "source-date-epoch": 1741507721,
-        "versions": [
-            "9.5.0",
-            "9.4.0",
-            "9.2.0",
-            "9.1.0",
-            "9.0.0",
-            "8.29.0",
-            "8.28.0",
-            "8.27.0",
-            "8.26.0",
-            "8.25.0",
-            "8.24.0",
-            "8.23.0",
-            "8.22.0",
-            "8.21.0",
-            "8.20.0",
-            "8.19.0",
-            "8.18.0",
-            "8.17.0",
-            "8.16.0",
-            "8.15.0",
-            "8.14.0",
-            "8.13.0",
-            "8.12.0",
-            "8.11.0",
-            "8.10.0",
-            "8.9.0",
-            "8.8.0",
-            "8.7.1",
-            "8.7.0",
-            "8.6.0",
-            "8.5.1",
-            "8.5.0",
-            "8.4.0",
-            "8.3.1",
-            "8.3.0",
-            "8.2.0",
-            "8.1.0",
-            "8.0.0",
-            "7.4.1",
-            "7.4.0",
-            "7.3.0",
-            "7.2.1",
-            "7.2.0",
-            "7.1.0",
-            "7.0.0",
-            "6.2.0",
-            "6.1.0",
-            "6.0.0",
-            "5.1.0",
-            "5.0.0",
-            "4.1.0",
-            "4.0.0",
-            "3.4.0",
-            "3.3.0",
-            "3.2.0",
-            "3.1.1",
-            "3.0.0",
-            "2.6.0",
-            "2.5.0",
-            "2.4.0",
-            "2.3.0",
-            "2.2.0",
-            "2.1.0",
-            "2.0.0",
-            "1.4.0",
-            "1.3.0",
-            "1.2.0",
-            "1.1.0",
-            "1.0.0",
-        ],
         "integrations": [
             {
                 "integration-name": "Amazon Athena",
@@ -1370,55 +1297,4 @@ def get_provider_info():
             },
         },
         "executors": ["airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-compat>=1.6.0",
-            "apache-airflow-providers-common-sql>=1.20.0",
-            "apache-airflow-providers-http",
-            "boto3>=1.37.0",
-            "botocore>=1.34.90",
-            "inflection>=0.5.1",
-            "watchtower>=3.0.0,!=3.3.0,<4",
-            "jsonpath_ng>=1.5.3",
-            "redshift_connector>=2.0.918",
-            "asgiref>=2.3.0",
-            "PyAthena>=3.0.10",
-            "jmespath>=0.7.0",
-            "python3-saml>=1.16.0",
-            "xmlsec!=1.3.15,>=1.3.14",
-            "sagemaker-studio>=1.0.9",
-        ],
-        "optional-dependencies": {
-            "pandas": ["pandas>=2.1.2,<2.2"],
-            "aiobotocore": ["aiobotocore[boto3]>=2.13.0"],
-            "cncf.kubernetes": ["apache-airflow-providers-cncf-kubernetes>=7.2.0"],
-            "s3fs": ["s3fs>=2023.10.0"],
-            "python3-saml": ["python3-saml>=1.16.0"],
-            "apache.hive": ["apache-airflow-providers-apache-hive"],
-            "exasol": ["apache-airflow-providers-exasol"],
-            "fab": ["apache-airflow-providers-fab"],
-            "ftp": ["apache-airflow-providers-ftp"],
-            "google": ["apache-airflow-providers-google"],
-            "imap": ["apache-airflow-providers-imap"],
-            "microsoft.azure": ["apache-airflow-providers-microsoft-azure"],
-            "mongo": ["apache-airflow-providers-mongo"],
-            "openlineage": ["apache-airflow-providers-openlineage"],
-            "salesforce": ["apache-airflow-providers-salesforce"],
-            "ssh": ["apache-airflow-providers-ssh"],
-            "standard": ["apache-airflow-providers-standard"],
-        },
-        "devel-dependencies": [
-            "aiobotocore>=2.13.0",
-            "aws_xray_sdk>=2.12.0",
-            "moto[cloudformation,glue]>=5.0.0",
-            "mypy-boto3-appflow>=1.37.0",
-            "mypy-boto3-rds>=1.34.90",
-            "mypy-boto3-redshift-data>=1.34.0",
-            "mypy-boto3-s3>=1.34.90",
-            "s3fs>=2023.10.0",
-            "openapi-schema-validator>=0.6.2",
-            "openapi-spec-validator>=0.7.1",
-            "opensearch-py>=2.2.0",
-            "responses>=0.25.0",
-        ],
     }
{apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.6.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.5.0rc2
+Version: 9.6.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,23 +20,23 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-compat>=1.6.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: boto3>=1.37.0
-Requires-Dist: botocore>=1.34.90
+Requires-Dist: botocore>=1.37.0
 Requires-Dist: inflection>=0.5.1
-Requires-Dist: watchtower>=3.0.0,!=3.3.0,<4
+Requires-Dist: watchtower>=3.3.1,<4
 Requires-Dist: jsonpath_ng>=1.5.3
-Requires-Dist: redshift_connector>=2.0.918
+Requires-Dist: redshift_connector>=2.1.3
 Requires-Dist: asgiref>=2.3.0
-Requires-Dist: PyAthena>=3.0.10
+Requires-Dist: PyAthena>=3.10.0
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: python3-saml>=1.16.0
 Requires-Dist: xmlsec!=1.3.15,>=1.3.14
 Requires-Dist: sagemaker-studio>=1.0.9
-Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
+Requires-Dist: aiobotocore[boto3]>=2.20.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
@@ -54,8 +54,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -103,7 +103,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.5.0``
+Release: ``9.6.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -116,7 +116,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0/>`_.
 
 Installation
 ------------
@@ -130,26 +130,26 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
 Requirements
 ------------
 
-========================================== ======================
+========================================== =====================
 PIP package                                Version required
-========================================== ======================
+========================================== =====================
 ``apache-airflow``                         ``>=2.9.0``
 ``apache-airflow-providers-common-compat`` ``>=1.6.0``
 ``apache-airflow-providers-common-sql``    ``>=1.20.0``
 ``apache-airflow-providers-http``
 ``boto3``                                  ``>=1.37.0``
-``botocore``                               ``>=1.34.90``
+``botocore``                               ``>=1.37.0``
 ``inflection``                             ``>=0.5.1``
-``watchtower``                             ``>=3.0.0,!=3.3.0,<4``
+``watchtower``                             ``>=3.3.1,<4``
 ``jsonpath_ng``                            ``>=1.5.3``
-``redshift_connector``                     ``>=2.0.918``
+``redshift_connector``                     ``>=2.1.3``
 ``asgiref``                                ``>=2.3.0``
-``PyAthena``                               ``>=3.0.10``
+``PyAthena``                               ``>=3.10.0``
 ``jmespath``                               ``>=0.7.0``
 ``python3-saml``                           ``>=1.16.0``
 ``xmlsec``                                 ``>=1.3.14,!=1.3.15``
 ``sagemaker-studio``                       ``>=1.0.9``
-========================================== ======================
+========================================== =====================
 
 Cross provider package dependencies
 -----------------------------------
@@ -184,5 +184,5 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.6.0/changelog.html>`_.
 