apache-airflow-providers-amazon 9.4.0rc1__py3-none-any.whl → 9.5.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (69)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/avp/entities.py +3 -1
  3. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -1
  4. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +80 -110
  5. airflow/providers/amazon/aws/auth_manager/router/login.py +11 -4
  6. airflow/providers/amazon/aws/auth_manager/user.py +7 -4
  7. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -1
  8. airflow/providers/amazon/aws/hooks/appflow.py +5 -15
  9. airflow/providers/amazon/aws/hooks/athena_sql.py +2 -2
  10. airflow/providers/amazon/aws/hooks/base_aws.py +34 -1
  11. airflow/providers/amazon/aws/hooks/batch_client.py +1 -2
  12. airflow/providers/amazon/aws/hooks/batch_waiters.py +11 -3
  13. airflow/providers/amazon/aws/hooks/dms.py +3 -1
  14. airflow/providers/amazon/aws/hooks/ec2.py +1 -1
  15. airflow/providers/amazon/aws/hooks/eks.py +3 -6
  16. airflow/providers/amazon/aws/hooks/glue.py +6 -2
  17. airflow/providers/amazon/aws/hooks/logs.py +2 -2
  18. airflow/providers/amazon/aws/hooks/mwaa.py +79 -15
  19. airflow/providers/amazon/aws/hooks/redshift_cluster.py +10 -10
  20. airflow/providers/amazon/aws/hooks/redshift_data.py +3 -4
  21. airflow/providers/amazon/aws/hooks/s3.py +3 -1
  22. airflow/providers/amazon/aws/hooks/sagemaker.py +2 -2
  23. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +188 -0
  24. airflow/providers/amazon/aws/links/athena.py +1 -2
  25. airflow/providers/amazon/aws/links/base_aws.py +8 -1
  26. airflow/providers/amazon/aws/links/sagemaker_unified_studio.py +27 -0
  27. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +174 -54
  28. airflow/providers/amazon/aws/log/s3_task_handler.py +136 -84
  29. airflow/providers/amazon/aws/notifications/chime.py +1 -2
  30. airflow/providers/amazon/aws/notifications/sns.py +1 -1
  31. airflow/providers/amazon/aws/notifications/sqs.py +1 -1
  32. airflow/providers/amazon/aws/operators/ec2.py +91 -83
  33. airflow/providers/amazon/aws/operators/eks.py +3 -3
  34. airflow/providers/amazon/aws/operators/mwaa.py +73 -2
  35. airflow/providers/amazon/aws/operators/redshift_cluster.py +10 -3
  36. airflow/providers/amazon/aws/operators/s3.py +147 -157
  37. airflow/providers/amazon/aws/operators/sagemaker.py +4 -7
  38. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +155 -0
  39. airflow/providers/amazon/aws/sensors/ec2.py +5 -12
  40. airflow/providers/amazon/aws/sensors/emr.py +1 -1
  41. airflow/providers/amazon/aws/sensors/glacier.py +1 -1
  42. airflow/providers/amazon/aws/sensors/mwaa.py +161 -0
  43. airflow/providers/amazon/aws/sensors/rds.py +10 -5
  44. airflow/providers/amazon/aws/sensors/s3.py +32 -43
  45. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +73 -0
  46. airflow/providers/amazon/aws/sensors/step_function.py +2 -1
  47. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +2 -2
  48. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +19 -4
  49. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +19 -3
  50. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  51. airflow/providers/amazon/aws/triggers/README.md +4 -4
  52. airflow/providers/amazon/aws/triggers/base.py +11 -2
  53. airflow/providers/amazon/aws/triggers/ecs.py +6 -2
  54. airflow/providers/amazon/aws/triggers/eks.py +2 -2
  55. airflow/providers/amazon/aws/triggers/glue.py +1 -1
  56. airflow/providers/amazon/aws/triggers/mwaa.py +128 -0
  57. airflow/providers/amazon/aws/triggers/s3.py +31 -6
  58. airflow/providers/amazon/aws/triggers/sagemaker.py +2 -2
  59. airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py +66 -0
  60. airflow/providers/amazon/aws/triggers/sqs.py +11 -3
  61. airflow/providers/amazon/aws/{auth_manager/security_manager/__init__.py → utils/sagemaker_unified_studio.py} +12 -0
  62. airflow/providers/amazon/aws/utils/waiter_with_logging.py +4 -3
  63. airflow/providers/amazon/aws/waiters/mwaa.json +36 -0
  64. airflow/providers/amazon/get_provider_info.py +46 -5
  65. {apache_airflow_providers_amazon-9.4.0rc1.dist-info → apache_airflow_providers_amazon-9.5.0.dist-info}/METADATA +40 -33
  66. {apache_airflow_providers_amazon-9.4.0rc1.dist-info → apache_airflow_providers_amazon-9.5.0.dist-info}/RECORD +68 -61
  67. {apache_airflow_providers_amazon-9.4.0rc1.dist-info → apache_airflow_providers_amazon-9.5.0.dist-info}/WHEEL +1 -1
  68. airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py +0 -40
  69. {apache_airflow_providers_amazon-9.4.0rc1.dist-info → apache_airflow_providers_amazon-9.5.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/triggers/s3.py
@@ -53,6 +53,9 @@ class S3KeyTrigger(BaseTrigger):
         poke_interval: float = 5.0,
         should_check_fn: bool = False,
         use_regex: bool = False,
+        region_name: str | None = None,
+        verify: bool | str | None = None,
+        botocore_config: dict | None = None,
         **hook_params: Any,
     ):
         super().__init__()
@@ -64,6 +67,9 @@ class S3KeyTrigger(BaseTrigger):
         self.poke_interval = poke_interval
         self.should_check_fn = should_check_fn
         self.use_regex = use_regex
+        self.region_name = region_name
+        self.verify = verify
+        self.botocore_config = botocore_config

     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize S3KeyTrigger arguments and classpath."""
@@ -78,17 +84,25 @@ class S3KeyTrigger(BaseTrigger):
                 "poke_interval": self.poke_interval,
                 "should_check_fn": self.should_check_fn,
                 "use_regex": self.use_regex,
+                "region_name": self.region_name,
+                "verify": self.verify,
+                "botocore_config": self.botocore_config,
             },
         )

     @cached_property
     def hook(self) -> S3Hook:
-        return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.hook_params.get("verify"))
+        return S3Hook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )

     async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make an asynchronous connection using S3HookAsync."""
         try:
-            async with self.hook.async_conn as client:
+            async with await self.hook.get_async_conn() as client:
                 while True:
                     if await self.hook.check_key_async(
                         client, self.bucket_name, self.bucket_key, self.wildcard_match, self.use_regex
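Taken together, the three hunks above thread a region, a TLS-verification flag, and a botocore config through S3KeyTrigger and into the S3Hook it builds. A minimal sketch of constructing the trigger with the new parameters (the bucket and key names are hypothetical; serialize() shows the new keys round-tripping). The same three connection parameters are threaded through S3KeysUnchangedTrigger in the hunks below.

    from airflow.providers.amazon.aws.triggers.s3 import S3KeyTrigger

    trigger = S3KeyTrigger(
        bucket_name="my-bucket",        # hypothetical bucket
        bucket_key="data/*.csv",        # hypothetical key pattern
        wildcard_match=True,
        region_name="eu-west-1",        # new in 9.5.0
        verify=True,                    # new in 9.5.0
        botocore_config={"retries": {"max_attempts": 3}},  # new in 9.5.0
    )
    classpath, kwargs = trigger.serialize()
    assert kwargs["region_name"] == "eu-west-1"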
@@ -143,7 +157,9 @@ class S3KeysUnchangedTrigger(BaseTrigger):
         allow_delete: bool = True,
         aws_conn_id: str | None = "aws_default",
         last_activity_time: datetime | None = None,
+        region_name: str | None = None,
         verify: bool | str | None = None,
+        botocore_config: dict | None = None,
         **hook_params: Any,
     ):
         super().__init__()
@@ -160,8 +176,10 @@ class S3KeysUnchangedTrigger(BaseTrigger):
         self.allow_delete = allow_delete
         self.aws_conn_id = aws_conn_id
         self.last_activity_time = last_activity_time
-        self.verify = verify
         self.polling_period_seconds = 0
+        self.region_name = region_name
+        self.verify = verify
+        self.botocore_config = botocore_config
         self.hook_params = hook_params

     def serialize(self) -> tuple[str, dict[str, Any]]:
@@ -179,19 +197,26 @@ class S3KeysUnchangedTrigger(BaseTrigger):
                 "aws_conn_id": self.aws_conn_id,
                 "last_activity_time": self.last_activity_time,
                 "hook_params": self.hook_params,
-                "verify": self.verify,
                 "polling_period_seconds": self.polling_period_seconds,
+                "region_name": self.region_name,
+                "verify": self.verify,
+                "botocore_config": self.botocore_config,
             },
         )

     @cached_property
     def hook(self) -> S3Hook:
-        return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.hook_params.get("verify"))
+        return S3Hook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )

     async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make an asynchronous connection using S3Hook."""
         try:
-            async with self.hook.async_conn as client:
+            async with await self.hook.get_async_conn() as client:
                 while True:
                     result = await self.hook.is_keys_unchanged_async(
                         client=client,
airflow/providers/amazon/aws/triggers/sagemaker.py
@@ -108,7 +108,7 @@ class SageMakerTrigger(BaseTrigger):

     async def run(self):
         self.log.info("job name is %s and job type is %s", self.job_name, self.job_type)
-        async with self.hook.async_conn as client:
+        async with await self.hook.get_async_conn() as client:
             waiter = self.hook.get_waiter(
                 self._get_job_type_waiter(self.job_type), deferrable=True, client=client
             )
@@ -166,7 +166,7 @@ class SageMakerPipelineTrigger(BaseTrigger):

     async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = SageMakerHook(aws_conn_id=self.aws_conn_id)
-        async with hook.async_conn as conn:
+        async with await hook.get_async_conn() as conn:
             waiter = hook.get_waiter(self._waiter_name[self.waiter_type], deferrable=True, client=conn)
             for _ in range(self.waiter_max_attempts):
                 try:
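The async_conn-to-get_async_conn() change recurs across the S3, SageMaker, and SQS trigger hunks in this release: the async client is now obtained from an awaitable method rather than a property. A hedged sketch of the new call pattern, using S3Hook and the check_key_async call visible in the S3KeyTrigger hunk (the bucket and key are hypothetical; running it needs the aiobotocore extra and reachable AWS credentials):

    import asyncio

    from airflow.providers.amazon.aws.hooks.s3 import S3Hook


    async def key_exists() -> bool:
        hook = S3Hook(aws_conn_id="aws_default")
        # get_async_conn() is awaited first; the result is then used as an
        # async context manager, exactly as in the rewritten triggers.
        async with await hook.get_async_conn() as client:
            return await hook.check_key_async(
                client, "my-bucket", "data/file.csv", False, False
            )


    # asyncio.run(key_exists())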
airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""This module contains the Amazon SageMaker Unified Studio Notebook job trigger."""
+
+from __future__ import annotations
+
+from airflow.triggers.base import BaseTrigger
+
+
+class SageMakerNotebookJobTrigger(BaseTrigger):
+    """
+    Watches for a notebook job, triggers when it finishes.
+
+    Examples:
+     .. code-block:: python
+
+        from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import SageMakerNotebookJobTrigger
+
+        notebook_trigger = SageMakerNotebookJobTrigger(
+            execution_id="notebook_job_1234",
+            execution_name="notebook_task",
+            waiter_delay=10,
+            waiter_max_attempts=1440,
+        )
+
+    :param execution_id: A unique, meaningful id for the task.
+    :param execution_name: A unique, meaningful name for the task.
+    :param waiter_delay: Interval in seconds to check the notebook execution status.
+    :param waiter_max_attempts: Number of attempts to wait before returning FAILED.
+    """
+
+    def __init__(self, execution_id, execution_name, waiter_delay, waiter_max_attempts, **kwargs):
+        super().__init__(**kwargs)
+        self.execution_id = execution_id
+        self.execution_name = execution_name
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
+
+    def serialize(self):
+        return (
+            # dynamically generate the fully qualified name of the class
+            self.__class__.__module__ + "." + self.__class__.__qualname__,
+            {
+                "execution_id": self.execution_id,
+                "execution_name": self.execution_name,
+                "poll_interval": self.poll_interval,
+            },
+        )
+
+    async def run(self):
+        pass
airflow/providers/amazon/aws/triggers/sqs.py
@@ -23,14 +23,22 @@ from typing import TYPE_CHECKING, Any
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.utils.sqs import process_response
-from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.triggers.base import BaseEventTrigger, TriggerEvent
+else:
+    from airflow.triggers.base import (  # type: ignore
+        BaseTrigger as BaseEventTrigger,
+        TriggerEvent,
+    )

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
     from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType


-class SqsSensorTrigger(BaseTrigger):
+class SqsSensorTrigger(BaseEventTrigger):
     """
     Asynchronously get messages from an Amazon SQS queue and then delete the messages from the queue.

@@ -176,7 +184,7 @@ class SqsSensorTrigger(BaseTrigger):
         while True:
             # This loop will run indefinitely until the timeout, which is set in the self.defer
             # method, is reached.
-            async with self.hook.async_conn as client:
+            async with await self.hook.get_async_conn() as client:
                 result = await self.poke(client=client)
                 if result:
                     yield TriggerEvent({"status": "success", "message_batch": result})
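BaseEventTrigger is the Airflow 3 base class for triggers that can also serve as event-driven scheduling sources; on Airflow 2 the shim above falls back to plain BaseTrigger under the same name. A minimal sketch of reusing the same pattern in a custom trigger, assuming only that the provider's version_compat module is importable (the trigger class itself is hypothetical):

    from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS

    if AIRFLOW_V_3_0_PLUS:
        from airflow.triggers.base import BaseEventTrigger, TriggerEvent
    else:
        from airflow.triggers.base import (  # type: ignore
            BaseTrigger as BaseEventTrigger,
            TriggerEvent,
        )


    class MyCustomTrigger(BaseEventTrigger):  # hypothetical custom trigger
        def serialize(self):
            # Same single class definition loads on both Airflow 2.x and 3.x.
            return (f"{type(self).__module__}.{type(self).__qualname__}", {})

        async def run(self):
            yield TriggerEvent({"status": "success"})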
airflow/providers/amazon/aws/{auth_manager/security_manager/__init__.py → utils/sagemaker_unified_studio.py}
@@ -14,3 +14,15 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
+"""This module contains utils for the Amazon SageMaker Unified Studio Notebook plugin."""
+
+from __future__ import annotations
+
+import os
+
+workflows_env_key = "WORKFLOWS_ENV"
+
+
+def is_local_runner():
+    return os.getenv(workflows_env_key, "") == "Local"
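A quick usage sketch of the relocated helper: it only compares the WORKFLOWS_ENV environment variable against the literal "Local", so it can be exercised without any AWS setup:

    import os

    os.environ["WORKFLOWS_ENV"] = "Local"

    from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner

    assert is_local_runner() is True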
airflow/providers/amazon/aws/utils/waiter_with_logging.py
@@ -136,15 +136,16 @@ async def async_wait(
             last_response = error.last_response

             if "terminal failure" in error_reason:
-                log.error("%s: %s", failure_message, _LazyStatusFormatter(status_args, last_response))
-                raise AirflowException(f"{failure_message}: {error}")
+                raise AirflowException(
+                    f"{failure_message}: {_LazyStatusFormatter(status_args, last_response)}\n{error}"
+                )

             if (
                 "An error occurred" in error_reason
                 and isinstance(last_response.get("Error"), dict)
                 and "Code" in last_response.get("Error")
             ):
-                raise AirflowException(f"{failure_message}: {error}")
+                raise AirflowException(f"{failure_message}\n{last_response}\n{error}")

             log.info("%s: %s", status_message, _LazyStatusFormatter(status_args, last_response))
         else:
airflow/providers/amazon/aws/waiters/mwaa.json
@@ -0,0 +1,36 @@
+{
+    "version": 2,
+    "waiters": {
+        "mwaa_dag_run_complete": {
+            "delay": 60,
+            "maxAttempts": 720,
+            "operation": "InvokeRestApi",
+            "acceptors": [
+                {
+                    "matcher": "path",
+                    "argument": "RestApiResponse.state",
+                    "expected": "queued",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "RestApiResponse.state",
+                    "expected": "running",
+                    "state": "retry"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "RestApiResponse.state",
+                    "expected": "success",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "RestApiResponse.state",
+                    "expected": "failed",
+                    "state": "failure"
+                }
+            ]
+        }
+    }
+}
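The JSON above defines a custom botocore waiter over the MWAA InvokeRestApi operation, polling RestApiResponse.state until a DAG run leaves the queued/running states. A hedged sketch of driving it through the provider's custom-waiter machinery: get_waiter() on the AWS hooks resolves names against these bundled JSON files, and the keyword arguments below are assumptions modeled on the underlying invoke_rest_api call (the environment name, DAG path, and wait settings are hypothetical):

    from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook

    hook = MwaaHook()
    waiter = hook.get_waiter("mwaa_dag_run_complete")
    # Arguments are forwarded to invoke_rest_api on each poll attempt.
    waiter.wait(
        Name="my-mwaa-environment",
        Path="/dags/my_dag/dagRuns/my_run_id",
        Method="GET",
        WaiterConfig={"Delay": 60, "MaxAttempts": 720},
    )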
airflow/providers/amazon/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1739958314,
+        "source-date-epoch": 1743477760,
         "versions": [
+            "9.5.0",
             "9.4.0",
             "9.2.0",
             "9.1.0",
@@ -275,6 +276,15 @@ def get_provider_info():
                 "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/sagemaker.rst"],
                 "tags": ["aws"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "external-doc-url": "https://aws.amazon.com/sagemaker/unified-studio/",
+                "logo": "/docs/integration-logos/Amazon-SageMaker_light-bg@4x.png",
+                "how-to-guide": [
+                    "/docs/apache-airflow-providers-amazon/operators/sagemakerunifiedstudio.rst"
+                ],
+                "tags": ["aws"],
+            },
             {
                 "integration-name": "Amazon SecretsManager",
                 "external-doc-url": "https://aws.amazon.com/secrets-manager/",
@@ -491,6 +501,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.operators.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.operators.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "Amazon Simple Notification Service (SNS)",
                 "python-modules": ["airflow.providers.amazon.aws.operators.sns"],
@@ -604,6 +618,10 @@ def get_provider_info():
                 "integration-name": "Amazon Managed Service for Apache Flink",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.kinesis_analytics"],
             },
+            {
+                "integration-name": "Amazon Managed Workflows for Apache Airflow (MWAA)",
+                "python-modules": ["airflow.providers.amazon.aws.sensors.mwaa"],
+            },
             {
                 "integration-name": "Amazon OpenSearch Serverless",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.opensearch_serverless"],
@@ -624,6 +642,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.sensors.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "Amazon Simple Queue Service (SQS)",
                 "python-modules": ["airflow.providers.amazon.aws.sensors.sqs"],
@@ -777,6 +799,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.hooks.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.hooks.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "Amazon Simple Email Service (SES)",
                 "python-modules": ["airflow.providers.amazon.aws.hooks.ses"],
@@ -855,6 +881,10 @@ def get_provider_info():
                 "integration-name": "AWS Lambda",
                 "python-modules": ["airflow.providers.amazon.aws.triggers.lambda_function"],
             },
+            {
+                "integration-name": "Amazon Managed Workflows for Apache Airflow (MWAA)",
+                "python-modules": ["airflow.providers.amazon.aws.triggers.mwaa"],
+            },
             {
                 "integration-name": "Amazon Managed Service for Apache Flink",
                 "python-modules": ["airflow.providers.amazon.aws.triggers.kinesis_analytics"],
@@ -874,6 +904,10 @@ def get_provider_info():
                 "integration-name": "Amazon SageMaker",
                 "python-modules": ["airflow.providers.amazon.aws.triggers.sagemaker"],
             },
+            {
+                "integration-name": "Amazon SageMaker Unified Studio",
+                "python-modules": ["airflow.providers.amazon.aws.triggers.sagemaker_unified_studio"],
+            },
             {
                 "integration-name": "AWS Glue",
                 "python-modules": [
@@ -1068,6 +1102,7 @@ def get_provider_info():
             "airflow.providers.amazon.aws.links.glue.GlueJobRunDetailsLink",
             "airflow.providers.amazon.aws.links.logs.CloudWatchEventsLink",
             "airflow.providers.amazon.aws.links.sagemaker.SageMakerTransformJobLink",
+            "airflow.providers.amazon.aws.links.sagemaker_unified_studio.SageMakerUnifiedStudioLink",
             "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink",
             "airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink",
             "airflow.providers.amazon.aws.links.comprehend.ComprehendPiiEntitiesDetectionLink",
@@ -1337,10 +1372,10 @@ def get_provider_info():
         "executors": ["airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"],
         "dependencies": [
             "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-compat>=1.3.0",
+            "apache-airflow-providers-common-compat>=1.6.0",
             "apache-airflow-providers-common-sql>=1.20.0",
             "apache-airflow-providers-http",
-            "boto3>=1.34.90",
+            "boto3>=1.37.0",
             "botocore>=1.34.90",
             "inflection>=0.5.1",
             "watchtower>=3.0.0,!=3.3.0,<4",
@@ -1350,6 +1385,8 @@ def get_provider_info():
             "PyAthena>=3.0.10",
             "jmespath>=0.7.0",
             "python3-saml>=1.16.0",
+            "xmlsec!=1.3.15,>=1.3.14",
+            "sagemaker-studio>=1.0.9",
         ],
         "optional-dependencies": {
             "pandas": ["pandas>=2.1.2,<2.2"],
@@ -1359,6 +1396,7 @@ def get_provider_info():
             "python3-saml": ["python3-saml>=1.16.0"],
             "apache.hive": ["apache-airflow-providers-apache-hive"],
             "exasol": ["apache-airflow-providers-exasol"],
+            "fab": ["apache-airflow-providers-fab"],
             "ftp": ["apache-airflow-providers-ftp"],
             "google": ["apache-airflow-providers-google"],
             "imap": ["apache-airflow-providers-imap"],
@@ -1367,17 +1405,20 @@ def get_provider_info():
             "openlineage": ["apache-airflow-providers-openlineage"],
             "salesforce": ["apache-airflow-providers-salesforce"],
             "ssh": ["apache-airflow-providers-ssh"],
+            "standard": ["apache-airflow-providers-standard"],
         },
         "devel-dependencies": [
             "aiobotocore>=2.13.0",
             "aws_xray_sdk>=2.12.0",
-            "moto[cloudformation,glue]>=5.0.0",
-            "mypy-boto3-appflow>=1.35.39",
+            "moto[cloudformation,glue]>=5.1.2",
+            "mypy-boto3-appflow>=1.37.0",
             "mypy-boto3-rds>=1.34.90",
             "mypy-boto3-redshift-data>=1.34.0",
             "mypy-boto3-s3>=1.34.90",
             "s3fs>=2023.10.0",
             "openapi-schema-validator>=0.6.2",
             "openapi-spec-validator>=0.7.1",
+            "opensearch-py>=2.2.0",
+            "responses>=0.25.0",
         ],
     }
{apache_airflow_providers_amazon-9.4.0rc1.dist-info → apache_airflow_providers_amazon-9.5.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.4.0rc1
+Version: 9.5.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,11 +20,11 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-compat>=1.3.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: boto3>=1.34.90
+Requires-Dist: boto3>=1.37.0
 Requires-Dist: botocore>=1.34.90
 Requires-Dist: inflection>=0.5.1
 Requires-Dist: watchtower>=3.0.0,!=3.3.0,<4
@@ -34,10 +34,13 @@ Requires-Dist: asgiref>=2.3.0
 Requires-Dist: PyAthena>=3.0.10
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: python3-saml>=1.16.0
+Requires-Dist: xmlsec!=1.3.15,>=1.3.14
+Requires-Dist: sagemaker-studio>=1.0.9
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
+Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
 Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
@@ -49,17 +52,19 @@ Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml"
 Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
+Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0
+Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: aiobotocore
 Provides-Extra: apache-hive
 Provides-Extra: cncf-kubernetes
 Provides-Extra: exasol
+Provides-Extra: fab
 Provides-Extra: ftp
 Provides-Extra: google
 Provides-Extra: imap
@@ -71,34 +76,34 @@ Provides-Extra: python3-saml
 Provides-Extra: s3fs
 Provides-Extra: salesforce
 Provides-Extra: ssh
+Provides-Extra: standard


-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at

-.. http://www.apache.org/licenses/LICENSE-2.0
+ .. http://www.apache.org/licenses/LICENSE-2.0

-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
-
-.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-   `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+    ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY

 Package ``apache-airflow-providers-amazon``

-Release: ``9.4.0``
+Release: ``9.5.0``


 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -111,7 +116,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/>`_.

 Installation
 ------------
@@ -129,10 +134,10 @@ Requirements
 PIP package                                 Version required
 ==========================================  ======================
 ``apache-airflow``                          ``>=2.9.0``
-``apache-airflow-providers-common-compat``  ``>=1.3.0``
+``apache-airflow-providers-common-compat``  ``>=1.6.0``
 ``apache-airflow-providers-common-sql``     ``>=1.20.0``
 ``apache-airflow-providers-http``
-``boto3``                                   ``>=1.34.90``
+``boto3``                                   ``>=1.37.0``
 ``botocore``                                ``>=1.34.90``
 ``inflection``                              ``>=0.5.1``
 ``watchtower``                              ``>=3.0.0,!=3.3.0,<4``
@@ -142,13 +147,15 @@ PIP package Version required
 ``PyAthena``                                ``>=3.0.10``
 ``jmespath``                                ``>=0.7.0``
 ``python3-saml``                            ``>=1.16.0``
+``xmlsec``                                  ``>=1.3.14,!=1.3.15``
+``sagemaker-studio``                        ``>=1.0.9``
 ==========================================  ======================

 Cross provider package dependencies
 -----------------------------------

 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.

 You can install such cross-provider dependencies when installing from PyPI. For example:

@@ -177,5 +184,5 @@ Dependent package
 ======================================================================================================================  ===================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.4.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.5.0/changelog.html>`_.
