apache-airflow-providers-amazon 9.4.0__py3-none-any.whl → 9.5.0rc1__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (41)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/avp/entities.py +1 -1
  3. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -1
  4. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +21 -100
  5. airflow/providers/amazon/aws/auth_manager/router/login.py +3 -2
  6. airflow/providers/amazon/aws/auth_manager/user.py +7 -4
  7. airflow/providers/amazon/aws/hooks/base_aws.py +25 -0
  8. airflow/providers/amazon/aws/hooks/ec2.py +1 -1
  9. airflow/providers/amazon/aws/hooks/glue.py +6 -2
  10. airflow/providers/amazon/aws/hooks/logs.py +2 -2
  11. airflow/providers/amazon/aws/hooks/mwaa.py +79 -15
  12. airflow/providers/amazon/aws/hooks/redshift_cluster.py +1 -1
  13. airflow/providers/amazon/aws/hooks/redshift_data.py +2 -2
  14. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
  15. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +188 -0
  16. airflow/providers/amazon/aws/links/base_aws.py +7 -1
  17. airflow/providers/amazon/aws/links/sagemaker_unified_studio.py +27 -0
  18. airflow/providers/amazon/aws/log/s3_task_handler.py +22 -7
  19. airflow/providers/amazon/aws/operators/s3.py +147 -157
  20. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +155 -0
  21. airflow/providers/amazon/aws/sensors/emr.py +1 -1
  22. airflow/providers/amazon/aws/sensors/mwaa.py +113 -0
  23. airflow/providers/amazon/aws/sensors/rds.py +10 -5
  24. airflow/providers/amazon/aws/sensors/s3.py +31 -42
  25. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +73 -0
  26. airflow/providers/amazon/aws/triggers/README.md +4 -4
  27. airflow/providers/amazon/aws/triggers/base.py +1 -1
  28. airflow/providers/amazon/aws/triggers/ecs.py +6 -2
  29. airflow/providers/amazon/aws/triggers/eks.py +2 -2
  30. airflow/providers/amazon/aws/triggers/glue.py +1 -1
  31. airflow/providers/amazon/aws/triggers/s3.py +31 -6
  32. airflow/providers/amazon/aws/triggers/sagemaker.py +2 -2
  33. airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py +66 -0
  34. airflow/providers/amazon/aws/triggers/sqs.py +11 -3
  35. airflow/providers/amazon/aws/{auth_manager/security_manager/__init__.py → utils/sagemaker_unified_studio.py} +12 -0
  36. airflow/providers/amazon/get_provider_info.py +36 -1
  37. {apache_airflow_providers_amazon-9.4.0.dist-info → apache_airflow_providers_amazon-9.5.0rc1.dist-info}/METADATA +33 -28
  38. {apache_airflow_providers_amazon-9.4.0.dist-info → apache_airflow_providers_amazon-9.5.0rc1.dist-info}/RECORD +40 -35
  39. {apache_airflow_providers_amazon-9.4.0.dist-info → apache_airflow_providers_amazon-9.5.0rc1.dist-info}/WHEEL +1 -1
  40. airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py +0 -40
  41. {apache_airflow_providers_amazon-9.4.0.dist-info → apache_airflow_providers_amazon-9.5.0rc1.dist-info}/entry_points.txt +0 -0

airflow/providers/amazon/aws/triggers/s3.py
@@ -53,6 +53,9 @@ class S3KeyTrigger(BaseTrigger):
         poke_interval: float = 5.0,
         should_check_fn: bool = False,
         use_regex: bool = False,
+        region_name: str | None = None,
+        verify: bool | str | None = None,
+        botocore_config: dict | None = None,
         **hook_params: Any,
     ):
         super().__init__()
@@ -64,6 +67,9 @@ class S3KeyTrigger(BaseTrigger):
         self.poke_interval = poke_interval
         self.should_check_fn = should_check_fn
         self.use_regex = use_regex
+        self.region_name = region_name
+        self.verify = verify
+        self.botocore_config = botocore_config

     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize S3KeyTrigger arguments and classpath."""
@@ -78,17 +84,25 @@ class S3KeyTrigger(BaseTrigger):
                 "poke_interval": self.poke_interval,
                 "should_check_fn": self.should_check_fn,
                 "use_regex": self.use_regex,
+                "region_name": self.region_name,
+                "verify": self.verify,
+                "botocore_config": self.botocore_config,
             },
         )

     @cached_property
     def hook(self) -> S3Hook:
-        return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.hook_params.get("verify"))
+        return S3Hook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )

     async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make an asynchronous connection using S3HookAsync."""
         try:
-            async with self.hook.async_conn as client:
+            async with await self.hook.get_async_conn() as client:
                 while True:
                     if await self.hook.check_key_async(
                         client, self.bucket_name, self.bucket_key, self.wildcard_match, self.use_regex
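
A minimal construction sketch for the new S3KeyTrigger connection parameters (bucket and option values are illustrative, not taken from the diff). The three new arguments are forwarded to the S3Hook directly, where the old code read verify out of hook_params:

    from airflow.providers.amazon.aws.triggers.s3 import S3KeyTrigger

    trigger = S3KeyTrigger(
        bucket_name="my-bucket",                       # hypothetical bucket
        bucket_key="data/*.csv",
        wildcard_match=True,
        region_name="us-east-1",                       # new in 9.5.0
        verify=False,                                  # new: TLS verification flag or CA bundle path
        botocore_config={"max_pool_connections": 10},  # new: dict forwarded as the hook's config
    )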

@@ -143,7 +157,9 @@ class S3KeysUnchangedTrigger(BaseTrigger):
         allow_delete: bool = True,
         aws_conn_id: str | None = "aws_default",
         last_activity_time: datetime | None = None,
+        region_name: str | None = None,
         verify: bool | str | None = None,
+        botocore_config: dict | None = None,
         **hook_params: Any,
     ):
         super().__init__()
@@ -160,8 +176,10 @@ class S3KeysUnchangedTrigger(BaseTrigger):
         self.allow_delete = allow_delete
         self.aws_conn_id = aws_conn_id
         self.last_activity_time = last_activity_time
-        self.verify = verify
         self.polling_period_seconds = 0
+        self.region_name = region_name
+        self.verify = verify
+        self.botocore_config = botocore_config
         self.hook_params = hook_params

     def serialize(self) -> tuple[str, dict[str, Any]]:
@@ -179,19 +197,26 @@ class S3KeysUnchangedTrigger(BaseTrigger):
                 "aws_conn_id": self.aws_conn_id,
                 "last_activity_time": self.last_activity_time,
                 "hook_params": self.hook_params,
-                "verify": self.verify,
                 "polling_period_seconds": self.polling_period_seconds,
+                "region_name": self.region_name,
+                "verify": self.verify,
+                "botocore_config": self.botocore_config,
             },
         )

     @cached_property
     def hook(self) -> S3Hook:
-        return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.hook_params.get("verify"))
+        return S3Hook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )

     async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make an asynchronous connection using S3Hook."""
         try:
-            async with self.hook.async_conn as client:
+            async with await self.hook.get_async_conn() as client:
                 while True:
                     result = await self.hook.is_keys_unchanged_async(
                         client=client,
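
Because deferred triggers are rebuilt on the triggerer process from their serialized form, the new fields only take effect if they round-trip through serialize(), which is why both serialize() methods gain the three new keys. A hedged sketch (the positional parameters bucket_name and prefix are assumed, not shown in this diff):

    from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger

    trigger = S3KeysUnchangedTrigger(
        bucket_name="my-bucket",  # hypothetical
        prefix="incoming/",
        region_name="eu-west-1",
        botocore_config={"retries": {"max_attempts": 3}},
    )
    classpath, kwargs = trigger.serialize()
    assert kwargs["region_name"] == "eu-west-1"  # survives deferral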

airflow/providers/amazon/aws/triggers/sagemaker.py
@@ -108,7 +108,7 @@ class SageMakerTrigger(BaseTrigger):

     async def run(self):
         self.log.info("job name is %s and job type is %s", self.job_name, self.job_type)
-        async with self.hook.async_conn as client:
+        async with await self.hook.get_async_conn() as client:
             waiter = self.hook.get_waiter(
                 self._get_job_type_waiter(self.job_type), deferrable=True, client=client
             )
@@ -166,7 +166,7 @@ class SageMakerPipelineTrigger(BaseTrigger):

     async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = SageMakerHook(aws_conn_id=self.aws_conn_id)
-        async with hook.async_conn as conn:
+        async with await hook.get_async_conn() as conn:
             waiter = hook.get_waiter(self._waiter_name[self.waiter_type], deferrable=True, client=conn)
             for _ in range(self.waiter_max_attempts):
                 try:
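
The recurring edit in these trigger hunks (and in the SQS hunk below) swaps the hook's async_conn property for an explicit coroutine. The calling pattern, taken directly from the diff, changes like this:

    # before: property returning an async context manager
    async with self.hook.async_conn as client:
        ...

    # 9.5.0: coroutine that must be awaited before entering the context
    async with await self.hook.get_async_conn() as client:
        ...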

airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py (new file)
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""This module contains the Amazon SageMaker Unified Studio Notebook job trigger."""
+
+from __future__ import annotations
+
+from airflow.triggers.base import BaseTrigger
+
+
+class SageMakerNotebookJobTrigger(BaseTrigger):
+    """
+    Watches for a notebook job, triggers when it finishes.
+
+    Examples:
+     .. code-block:: python
+
+        from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import SageMakerNotebookJobTrigger
+
+        notebook_trigger = SageMakerNotebookJobTrigger(
+            execution_id="notebook_job_1234",
+            execution_name="notebook_task",
+            waiter_delay=10,
+            waiter_max_attempts=1440,
+        )
+
+    :param execution_id: A unique, meaningful id for the task.
+    :param execution_name: A unique, meaningful name for the task.
+    :param waiter_delay: Interval in seconds to check the notebook execution status.
+    :param waiter_max_attempts: Number of attempts to wait before returning FAILED.
+    """
+
+    def __init__(self, execution_id, execution_name, waiter_delay, waiter_max_attempts, **kwargs):
+        super().__init__(**kwargs)
+        self.execution_id = execution_id
+        self.execution_name = execution_name
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
+
+    def serialize(self):
+        return (
+            # dynamically generate the fully qualified name of the class
+            self.__class__.__module__ + "." + self.__class__.__qualname__,
+            {
+                "execution_id": self.execution_id,
+                "execution_name": self.execution_name,
+                "poll_interval": self.poll_interval,
+            },
+        )
+
+    async def run(self):
+        pass
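
A hedged sketch of how a deferrable operator could hand off to the new trigger (the surrounding operator and its execute_complete resume method are illustrative, not part of this diff; argument values mirror the docstring example):

    from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import (
        SageMakerNotebookJobTrigger,
    )

    # inside a deferrable operator's execute():
    self.defer(
        trigger=SageMakerNotebookJobTrigger(
            execution_id="notebook_job_1234",
            execution_name="notebook_task",
            waiter_delay=10,
            waiter_max_attempts=1440,
        ),
        method_name="execute_complete",  # hypothetical resume callback
    )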

airflow/providers/amazon/aws/triggers/sqs.py
@@ -23,14 +23,22 @@ from typing import TYPE_CHECKING, Any
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.utils.sqs import process_response
-from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.triggers.base import BaseEventTrigger, TriggerEvent
+else:
+    from airflow.triggers.base import (  # type: ignore
+        BaseTrigger as BaseEventTrigger,
+        TriggerEvent,
+    )

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
     from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType


-class SqsSensorTrigger(BaseTrigger):
+class SqsSensorTrigger(BaseEventTrigger):
     """
     Asynchronously get messages from an Amazon SQS queue and then delete the messages from the queue.

@@ -176,7 +184,7 @@ class SqsSensorTrigger(BaseTrigger):
         while True:
             # This loop will run indefinitely until the timeout, which is set in the self.defer
             # method, is reached.
-            async with self.hook.async_conn as client:
+            async with await self.hook.get_async_conn() as client:
                 result = await self.poke(client=client)
                 if result:
                     yield TriggerEvent({"status": "success", "message_batch": result})

airflow/providers/amazon/aws/utils/sagemaker_unified_studio.py (renamed from airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py)
@@ -14,3 +14,15 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
+"""This module contains utils for the Amazon SageMaker Unified Studio Notebook plugin."""
+
+from __future__ import annotations
+
+import os
+
+workflows_env_key = "WORKFLOWS_ENV"
+
+
+def is_local_runner():
+    return os.getenv(workflows_env_key, "") == "Local"
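
A minimal usage sketch for the new helper; "Local" is the value the function checks for, the other value is illustrative:

    import os

    from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner

    os.environ["WORKFLOWS_ENV"] = "Local"  # simulate the local workflows runner
    assert is_local_runner()

    os.environ["WORKFLOWS_ENV"] = "MWAA"   # hypothetical non-local environment
    assert not is_local_runner()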

airflow/providers/amazon/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1739958314,
+        "source-date-epoch": 1741507721,
         "versions": [
+            "9.5.0",
             "9.4.0",
             "9.2.0",
             "9.1.0",
@@ -275,6 +276,15 @@ def get_provider_info():
                 "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/sagemaker.rst"],
                 "tags": ["aws"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "external-doc-url": "https://aws.amazon.com/sagemaker/unified-studio/",
+                "logo": "/docs/integration-logos/Amazon-SageMaker_light-bg@4x.png",
+                "how-to-guide": [
+                    "/docs/apache-airflow-providers-amazon/operators/sagemakerunifiedstudio.rst"
+                ],
+                "tags": ["aws"],
+            },
             {
                 "integration-name": "Amazon SecretsManager",
                 "external-doc-url": "https://aws.amazon.com/secrets-manager/",
@@ -491,6 +501,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.operators.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.operators.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "Amazon Simple Notification Service (SNS)",
                 "python-modules": ["airflow.providers.amazon.aws.operators.sns"],
@@ -604,6 +618,10 @@ def get_provider_info():
                 "integration-name": "Amazon Managed Service for Apache Flink",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.kinesis_analytics"],
             },
+            {
+                "integration-name": "Amazon Managed Workflows for Apache Airflow (MWAA)",
+                "python-modules": ["airflow.providers.amazon.aws.sensors.mwaa"],
+            },
             {
                 "integration-name": "Amazon OpenSearch Serverless",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.opensearch_serverless"],
@@ -624,6 +642,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.sensors.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "Amazon Simple Queue Service (SQS)",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.sqs"],
@@ -777,6 +799,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.hooks.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.hooks.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "Amazon Simple Email Service (SES)",
                 "python-modules": ["airflow.providers.amazon.aws.hooks.ses"],
@@ -874,6 +900,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.triggers.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.triggers.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "AWS Glue",
                 "python-modules": [
@@ -1068,6 +1098,7 @@ def get_provider_info():
             "airflow.providers.amazon.aws.links.glue.GlueJobRunDetailsLink",
             "airflow.providers.amazon.aws.links.logs.CloudWatchEventsLink",
             "airflow.providers.amazon.aws.links.sagemaker.SageMakerTransformJobLink",
+            "airflow.providers.amazon.aws.links.sagemaker_unified_studio.SageMakerUnifiedStudioLink",
             "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink",
             "airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink",
             "airflow.providers.amazon.aws.links.comprehend.ComprehendPiiEntitiesDetectionLink",
@@ -1350,6 +1381,7 @@ def get_provider_info():
             "PyAthena>=3.0.10",
             "jmespath>=0.7.0",
             "python3-saml>=1.16.0",
+            "sagemaker-studio>=1.0.9",
         ],
         "optional-dependencies": {
             "pandas": ["pandas>=2.1.2,<2.2"],
@@ -1359,6 +1391,7 @@ def get_provider_info():
             "python3-saml": ["python3-saml>=1.16.0"],
             "apache.hive": ["apache-airflow-providers-apache-hive"],
             "exasol": ["apache-airflow-providers-exasol"],
+            "fab": ["apache-airflow-providers-fab"],
             "ftp": ["apache-airflow-providers-ftp"],
             "google": ["apache-airflow-providers-google"],
             "imap": ["apache-airflow-providers-imap"],
@@ -1367,6 +1400,7 @@ def get_provider_info():
             "openlineage": ["apache-airflow-providers-openlineage"],
             "salesforce": ["apache-airflow-providers-salesforce"],
             "ssh": ["apache-airflow-providers-ssh"],
+            "standard": ["apache-airflow-providers-standard"],
         },
         "devel-dependencies": [
             "aiobotocore>=2.13.0",
@@ -1379,5 +1413,6 @@ def get_provider_info():
             "s3fs>=2023.10.0",
             "openapi-schema-validator>=0.6.2",
             "openapi-spec-validator>=0.7.1",
+            "opensearch-py>=2.2.0",
         ],
     }

apache_airflow_providers_amazon-9.5.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.4.0
+Version: 9.5.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.3.0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
+Requires-Dist: apache-airflow>=2.9.0rc0
+Requires-Dist: apache-airflow-providers-common-compat>=1.3.0rc0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
@@ -34,10 +34,12 @@ Requires-Dist: asgiref>=2.3.0
 Requires-Dist: PyAthena>=3.0.10
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: python3-saml>=1.16.0
+Requires-Dist: sagemaker-studio>=1.0.9
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
+Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
 Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
@@ -49,9 +51,10 @@ Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml"
 Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
+Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -60,6 +63,7 @@ Provides-Extra: aiobotocore
 Provides-Extra: apache-hive
 Provides-Extra: cncf-kubernetes
 Provides-Extra: exasol
+Provides-Extra: fab
 Provides-Extra: ftp
 Provides-Extra: google
 Provides-Extra: imap
@@ -71,34 +75,34 @@ Provides-Extra: python3-saml
 Provides-Extra: s3fs
 Provides-Extra: salesforce
 Provides-Extra: ssh
+Provides-Extra: standard


-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
+.. Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements. See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership. The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License. You may obtain a copy of the License at

-.. http://www.apache.org/licenses/LICENSE-2.0
+.. http://www.apache.org/licenses/LICENSE-2.0

-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
+.. Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied. See the License for the
+   specific language governing permissions and limitations
+   under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
-
-.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-   `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

+.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+   ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY

 Package ``apache-airflow-providers-amazon``

-Release: ``9.4.0``
+Release: ``9.5.0``


 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -111,7 +115,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/>`_.

 Installation
 ------------
@@ -142,6 +146,7 @@ PIP package Version required
 ``PyAthena``                               ``>=3.0.10``
 ``jmespath``                               ``>=0.7.0``
 ``python3-saml``                           ``>=1.16.0``
+``sagemaker-studio``                       ``>=1.0.9``
 ========================================== ======================

 Cross provider package dependencies
@@ -177,5 +182,5 @@ Dependent package
 ====================================================================================================================== ===================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html>`_.