apache-airflow-providers-amazon 7.4.1rc1__py3-none-any.whl → 8.0.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. airflow/providers/amazon/aws/hooks/athena.py +0 -15
  2. airflow/providers/amazon/aws/hooks/base_aws.py +98 -65
  3. airflow/providers/amazon/aws/hooks/batch_client.py +60 -27
  4. airflow/providers/amazon/aws/hooks/batch_waiters.py +3 -1
  5. airflow/providers/amazon/aws/hooks/emr.py +33 -74
  6. airflow/providers/amazon/aws/hooks/logs.py +22 -4
  7. airflow/providers/amazon/aws/hooks/redshift_cluster.py +1 -12
  8. airflow/providers/amazon/aws/hooks/sagemaker.py +0 -16
  9. airflow/providers/amazon/aws/links/emr.py +1 -3
  10. airflow/providers/amazon/aws/operators/athena.py +0 -15
  11. airflow/providers/amazon/aws/operators/batch.py +78 -24
  12. airflow/providers/amazon/aws/operators/ecs.py +21 -58
  13. airflow/providers/amazon/aws/operators/eks.py +0 -1
  14. airflow/providers/amazon/aws/operators/emr.py +94 -24
  15. airflow/providers/amazon/aws/operators/lambda_function.py +0 -19
  16. airflow/providers/amazon/aws/operators/rds.py +1 -1
  17. airflow/providers/amazon/aws/operators/redshift_cluster.py +22 -1
  18. airflow/providers/amazon/aws/operators/redshift_data.py +0 -62
  19. airflow/providers/amazon/aws/secrets/secrets_manager.py +0 -17
  20. airflow/providers/amazon/aws/secrets/systems_manager.py +0 -21
  21. airflow/providers/amazon/aws/sensors/dynamodb.py +97 -0
  22. airflow/providers/amazon/aws/sensors/emr.py +1 -2
  23. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +1 -1
  24. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +0 -19
  25. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -7
  26. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +10 -10
  27. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +0 -10
  28. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +0 -11
  29. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +0 -10
  30. airflow/providers/amazon/aws/transfers/sql_to_s3.py +23 -9
  31. airflow/providers/amazon/aws/triggers/redshift_cluster.py +54 -2
  32. airflow/providers/amazon/aws/waiters/base_waiter.py +12 -1
  33. airflow/providers/amazon/aws/waiters/emr-serverless.json +18 -0
  34. airflow/providers/amazon/get_provider_info.py +35 -30
  35. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/METADATA +81 -4
  36. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/RECORD +41 -41
  37. airflow/providers/amazon/aws/operators/aws_lambda.py +0 -29
  38. airflow/providers/amazon/aws/operators/redshift_sql.py +0 -57
  39. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/LICENSE +0 -0
  40. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/NOTICE +0 -0
  41. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/WHEEL +0 -0
  42. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/entry_points.txt +0 -0
  43. {apache_airflow_providers_amazon-7.4.1rc1.dist-info → apache_airflow_providers_amazon-8.0.0rc2.dist-info}/top_level.txt +0 -0
@@ -18,7 +18,8 @@ from __future__ import annotations
18
18
 
19
19
  from typing import Any, AsyncIterator
20
20
 
21
- from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftAsyncHook
21
+ from airflow.compat.functools import cached_property
22
+ from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftAsyncHook, RedshiftHook
22
23
  from airflow.triggers.base import BaseTrigger, TriggerEvent
23
24
 
24
25
 
@@ -55,7 +56,7 @@ class RedshiftClusterTrigger(BaseTrigger):
55
56
  },
56
57
  )
57
58
 
58
- async def run(self) -> AsyncIterator["TriggerEvent"]:
59
+ async def run(self) -> AsyncIterator[TriggerEvent]:
59
60
  hook = RedshiftAsyncHook(aws_conn_id=self.aws_conn_id)
60
61
  while self.attempts >= 1:
61
62
  self.attempts = self.attempts - 1
@@ -85,3 +86,54 @@ class RedshiftClusterTrigger(BaseTrigger):
85
86
  except Exception as e:
86
87
  if self.attempts < 1:
87
88
  yield TriggerEvent({"status": "error", "message": str(e)})
89
+
90
+
91
+ class RedshiftCreateClusterTrigger(BaseTrigger):
92
+ """
93
+ Trigger for RedshiftCreateClusterOperator.
94
+ The trigger will asynchronously poll the boto3 API and wait for the
95
+ Redshift cluster to be in the `available` state.
96
+
97
+ :param cluster_identifier: A unique identifier for the cluster.
98
+ :param poll_interval: The amount of time in seconds to wait between attempts.
99
+ :param max_attempt: The maximum number of attempts to be made.
100
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
101
+ """
102
+
103
+ def __init__(
104
+ self,
105
+ cluster_identifier: str,
106
+ poll_interval: int,
107
+ max_attempt: int,
108
+ aws_conn_id: str,
109
+ ):
110
+ self.cluster_identifier = cluster_identifier
111
+ self.poll_interval = poll_interval
112
+ self.max_attempt = max_attempt
113
+ self.aws_conn_id = aws_conn_id
114
+
115
+ def serialize(self) -> tuple[str, dict[str, Any]]:
116
+ return (
117
+ "airflow.providers.amazon.aws.triggers.redshift_cluster.RedshiftCreateClusterTrigger",
118
+ {
119
+ "cluster_identifier": str(self.cluster_identifier),
120
+ "poll_interval": str(self.poll_interval),
121
+ "max_attempt": str(self.max_attempt),
122
+ "aws_conn_id": str(self.aws_conn_id),
123
+ },
124
+ )
125
+
126
+ @cached_property
127
+ def hook(self) -> RedshiftHook:
128
+ return RedshiftHook(aws_conn_id=self.aws_conn_id)
129
+
130
+ async def run(self):
131
+ async with self.hook.async_conn as client:
132
+ await client.get_waiter("cluster_available").wait(
133
+ ClusterIdentifier=self.cluster_identifier,
134
+ WaiterConfig={
135
+ "Delay": int(self.poll_interval),
136
+ "MaxAttempts": int(self.max_attempt),
137
+ },
138
+ )
139
+ yield TriggerEvent({"status": "success", "message": "Cluster Created"})
@@ -28,9 +28,20 @@ class BaseBotoWaiter:
28
28
  For more details, see airflow/providers/amazon/aws/waiters/README.md
29
29
  """
30
30
 
31
- def __init__(self, client: boto3.client, model_config: dict) -> None:
31
+ def __init__(self, client: boto3.client, model_config: dict, deferrable: bool = False) -> None:
32
32
  self.model = WaiterModel(model_config)
33
33
  self.client = client
34
+ self.deferrable = deferrable
35
+
36
+ def _get_async_waiter_with_client(self, waiter_name: str):
37
+ from aiobotocore.waiter import create_waiter_with_client as create_async_waiter_with_client
38
+
39
+ return create_async_waiter_with_client(
40
+ waiter_name=waiter_name, waiter_model=self.model, client=self.client
41
+ )
34
42
 
35
43
  def waiter(self, waiter_name: str) -> Waiter:
44
+ if self.deferrable:
45
+ return self._get_async_waiter_with_client(waiter_name=waiter_name)
46
+
36
47
  return create_waiter_with_client(waiter_name=waiter_name, waiter_model=self.model, client=self.client)
@@ -0,0 +1,18 @@
1
+ {
2
+ "version": 2,
3
+ "waiters": {
4
+ "no_job_running": {
5
+ "operation": "ListJobRuns",
6
+ "delay": 10,
7
+ "maxAttempts": 60,
8
+ "acceptors": [
9
+ {
10
+ "matcher": "path",
11
+ "argument": "length(jobRuns) == `0`",
12
+ "expected": true,
13
+ "state": "success"
14
+ }
15
+ ]
16
+ }
17
+ }
18
+ }
@@ -29,6 +29,7 @@ def get_provider_info():
29
29
  "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
30
30
  "suspended": False,
31
31
  "versions": [
32
+ "8.0.0",
32
33
  "7.4.1",
33
34
  "7.4.0",
34
35
  "7.3.0",
@@ -73,6 +74,7 @@ def get_provider_info():
73
74
  "mypy-boto3-rds>=1.24.0",
74
75
  "mypy-boto3-redshift-data>=1.24.0",
75
76
  "mypy-boto3-appflow>=1.24.0",
77
+ "aiobotocore[boto3]>=2.2.0",
76
78
  ],
77
79
  "integrations": [
78
80
  {
@@ -105,6 +107,7 @@ def get_provider_info():
105
107
  "integration-name": "Amazon DynamoDB",
106
108
  "external-doc-url": "https://aws.amazon.com/dynamodb/",
107
109
  "logo": "/integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png",
110
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/dynamodb.rst"],
108
111
  "tags": ["aws"],
109
112
  },
110
113
  {
@@ -142,21 +145,21 @@ def get_provider_info():
142
145
  {
143
146
  "integration-name": "Amazon EMR",
144
147
  "external-doc-url": "https://aws.amazon.com/emr/",
145
- "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr.rst"],
148
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr/emr.rst"],
146
149
  "logo": "/integration-logos/aws/Amazon-EMR_light-bg@4x.png",
147
150
  "tags": ["aws"],
148
151
  },
149
152
  {
150
153
  "integration-name": "Amazon EMR on EKS",
151
154
  "external-doc-url": "https://docs.aws.amazon.com/emr/latest/EMR-on-EKS-DevelopmentGuide/emr-eks.html",
152
- "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr_eks.rst"],
155
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst"],
153
156
  "logo": "/integration-logos/aws/Amazon-EMR_light-bg@4x.png",
154
157
  "tags": ["aws"],
155
158
  },
156
159
  {
157
160
  "integration-name": "Amazon EMR Serverless",
158
161
  "external-doc-url": "https://docs.aws.amazon.com/emr/latest/EMR-Serverless-UserGuide/emr-serverless.html",
159
- "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr_serverless.rst"],
162
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst"],
160
163
  "logo": "/integration-logos/aws/Amazon-EMR_light-bg@4x.png",
161
164
  "tags": ["aws"],
162
165
  },
@@ -164,7 +167,7 @@ def get_provider_info():
164
167
  "integration-name": "Amazon Glacier",
165
168
  "external-doc-url": "https://aws.amazon.com/glacier/",
166
169
  "logo": "/integration-logos/aws/Amazon-S3-Glacier_light-bg@4x.png",
167
- "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/glacier.rst"],
170
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst"],
168
171
  "tags": ["aws"],
169
172
  },
170
173
  {
@@ -185,8 +188,8 @@ def get_provider_info():
185
188
  "external-doc-url": "https://aws.amazon.com/redshift/",
186
189
  "logo": "/integration-logos/aws/Amazon-Redshift_light-bg@4x.png",
187
190
  "how-to-guide": [
188
- "/docs/apache-airflow-providers-amazon/operators/redshift_sql.rst",
189
- "/docs/apache-airflow-providers-amazon/operators/redshift_cluster.rst",
191
+ "/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst",
192
+ "/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst",
190
193
  ],
191
194
  "tags": ["aws"],
192
195
  },
@@ -194,7 +197,9 @@ def get_provider_info():
194
197
  "integration-name": "Amazon Redshift Data",
195
198
  "external-doc-url": "https://aws.amazon.com/redshift/",
196
199
  "logo": "/integration-logos/aws/Amazon-Redshift_light-bg@4x.png",
197
- "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/redshift_data.rst"],
200
+ "how-to-guide": [
201
+ "/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst"
202
+ ],
198
203
  "tags": ["aws"],
199
204
  },
200
205
  {
@@ -235,7 +240,7 @@ def get_provider_info():
235
240
  "integration-name": "Amazon Simple Storage Service (S3)",
236
241
  "external-doc-url": "https://aws.amazon.com/s3/",
237
242
  "logo": "/integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png",
238
- "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/s3.rst"],
243
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/s3/s3.rst"],
239
244
  "tags": ["aws"],
240
245
  },
241
246
  {
@@ -366,10 +371,7 @@ def get_provider_info():
366
371
  },
367
372
  {
368
373
  "integration-name": "AWS Lambda",
369
- "python-modules": [
370
- "airflow.providers.amazon.aws.operators.aws_lambda",
371
- "airflow.providers.amazon.aws.operators.lambda_function",
372
- ],
374
+ "python-modules": ["airflow.providers.amazon.aws.operators.lambda_function"],
373
375
  },
374
376
  {
375
377
  "integration-name": "Amazon Simple Storage Service (S3)",
@@ -398,7 +400,6 @@ def get_provider_info():
398
400
  {
399
401
  "integration-name": "Amazon Redshift",
400
402
  "python-modules": [
401
- "airflow.providers.amazon.aws.operators.redshift_sql",
402
403
  "airflow.providers.amazon.aws.operators.redshift_cluster",
403
404
  "airflow.providers.amazon.aws.operators.redshift_data",
404
405
  ],
@@ -429,6 +430,10 @@ def get_provider_info():
429
430
  "integration-name": "AWS Database Migration Service",
430
431
  "python-modules": ["airflow.providers.amazon.aws.sensors.dms"],
431
432
  },
433
+ {
434
+ "integration-name": "Amazon DynamoDB",
435
+ "python-modules": ["airflow.providers.amazon.aws.sensors.dynamodb"],
436
+ },
432
437
  {
433
438
  "integration-name": "Amazon EC2",
434
439
  "python-modules": ["airflow.providers.amazon.aws.sensors.ec2"],
@@ -627,73 +632,73 @@ def get_provider_info():
627
632
  {
628
633
  "source-integration-name": "Amazon DynamoDB",
629
634
  "target-integration-name": "Amazon Simple Storage Service (S3)",
630
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/dynamodb_to_s3.rst",
635
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst",
631
636
  "python-module": "airflow.providers.amazon.aws.transfers.dynamodb_to_s3",
632
637
  },
633
638
  {
634
639
  "source-integration-name": "Google Cloud Storage (GCS)",
635
640
  "target-integration-name": "Amazon Simple Storage Service (S3)",
636
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/gcs_to_s3.rst",
641
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst",
637
642
  "python-module": "airflow.providers.amazon.aws.transfers.gcs_to_s3",
638
643
  },
639
644
  {
640
645
  "source-integration-name": "Amazon Glacier",
641
646
  "target-integration-name": "Google Cloud Storage (GCS)",
642
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/glacier_to_gcs.rst",
647
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst",
643
648
  "python-module": "airflow.providers.amazon.aws.transfers.glacier_to_gcs",
644
649
  },
645
650
  {
646
651
  "source-integration-name": "Google",
647
652
  "target-integration-name": "Amazon Simple Storage Service (S3)",
648
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/google_api_to_s3.rst",
653
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst",
649
654
  "python-module": "airflow.providers.amazon.aws.transfers.google_api_to_s3",
650
655
  },
651
656
  {
652
657
  "source-integration-name": "Apache Hive",
653
658
  "target-integration-name": "Amazon DynamoDB",
654
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/hive_to_dynamodb.rst",
659
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst",
655
660
  "python-module": "airflow.providers.amazon.aws.transfers.hive_to_dynamodb",
656
661
  },
657
662
  {
658
663
  "source-integration-name": "Internet Message Access Protocol (IMAP)",
659
664
  "target-integration-name": "Amazon Simple Storage Service (S3)",
660
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/imap_attachment_to_s3.rst",
665
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst",
661
666
  "python-module": "airflow.providers.amazon.aws.transfers.imap_attachment_to_s3",
662
667
  },
663
668
  {
664
669
  "source-integration-name": "MongoDB",
665
670
  "target-integration-name": "Amazon Simple Storage Service (S3)",
666
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/mongo_to_s3.rst",
671
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst",
667
672
  "python-module": "airflow.providers.amazon.aws.transfers.mongo_to_s3",
668
673
  },
669
674
  {
670
675
  "source-integration-name": "Amazon Redshift",
671
676
  "target-integration-name": "Amazon Simple Storage Service (S3)",
672
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/redshift_to_s3.rst",
677
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst",
673
678
  "python-module": "airflow.providers.amazon.aws.transfers.redshift_to_s3",
674
679
  },
675
680
  {
676
681
  "source-integration-name": "Amazon Simple Storage Service (S3)",
677
682
  "target-integration-name": "Amazon Redshift",
678
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_redshift.rst",
683
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst",
679
684
  "python-module": "airflow.providers.amazon.aws.transfers.s3_to_redshift",
680
685
  },
681
686
  {
682
687
  "source-integration-name": "Amazon Simple Storage Service (S3)",
683
688
  "target-integration-name": "SSH File Transfer Protocol (SFTP)",
684
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sftp.rst",
689
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst",
685
690
  "python-module": "airflow.providers.amazon.aws.transfers.s3_to_sftp",
686
691
  },
687
692
  {
688
693
  "source-integration-name": "SSH File Transfer Protocol (SFTP)",
689
694
  "target-integration-name": "Amazon Simple Storage Service (S3)",
690
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/sftp_to_s3.rst",
695
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst",
691
696
  "python-module": "airflow.providers.amazon.aws.transfers.sftp_to_s3",
692
697
  },
693
698
  {
694
699
  "source-integration-name": "Amazon Simple Storage Service (S3)",
695
700
  "target-integration-name": "File Transfer Protocol (FTP)",
696
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_ftp.rst",
701
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst",
697
702
  "python-module": "airflow.providers.amazon.aws.transfers.s3_to_ftp",
698
703
  },
699
704
  {
@@ -704,31 +709,31 @@ def get_provider_info():
704
709
  {
705
710
  "source-integration-name": "File Transfer Protocol (FTP)",
706
711
  "target-integration-name": "Amazon Simple Storage Service (S3)",
707
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/ftp_to_s3.rst",
712
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst",
708
713
  "python-module": "airflow.providers.amazon.aws.transfers.ftp_to_s3",
709
714
  },
710
715
  {
711
716
  "source-integration-name": "Salesforce",
712
717
  "target-integration-name": "Amazon Simple Storage Service (S3)",
713
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/salesforce_to_s3.rst",
718
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst",
714
719
  "python-module": "airflow.providers.amazon.aws.transfers.salesforce_to_s3",
715
720
  },
716
721
  {
717
722
  "source-integration-name": "Local",
718
723
  "target-integration-name": "Amazon Simple Storage Service (S3)",
719
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/local_to_s3.rst",
724
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst",
720
725
  "python-module": "airflow.providers.amazon.aws.transfers.local_to_s3",
721
726
  },
722
727
  {
723
728
  "source-integration-name": "Common SQL",
724
729
  "target-integration-name": "Amazon Simple Storage Service (S3)",
725
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/sql_to_s3.rst",
730
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst",
726
731
  "python-module": "airflow.providers.amazon.aws.transfers.sql_to_s3",
727
732
  },
728
733
  {
729
734
  "source-integration-name": "Amazon Simple Storage Service (S3)",
730
735
  "target-integration-name": "Common SQL",
731
- "how-to-guide": "/docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sql.rst",
736
+ "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst",
732
737
  "python-module": "airflow.providers.amazon.aws.transfers.s3_to_sql",
733
738
  },
734
739
  {
@@ -1,13 +1,13 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: apache-airflow-providers-amazon
3
- Version: 7.4.1rc1
3
+ Version: 8.0.0rc2
4
4
  Summary: Provider for Apache Airflow. Implements apache-airflow-providers-amazon package
5
5
  Home-page: https://airflow.apache.org/
6
6
  Download-URL: https://archive.apache.org/dist/airflow/providers
7
7
  Author: Apache Software Foundation
8
8
  Author-email: dev@airflow.apache.org
9
9
  License: Apache License 2.0
10
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/7.4.1/
10
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.0.0/
11
11
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
12
12
  Project-URL: Source Code, https://github.com/apache/airflow
13
13
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -30,6 +30,7 @@ Requires-Python: ~=3.7
30
30
  Description-Content-Type: text/x-rst
31
31
  License-File: LICENSE
32
32
  License-File: NOTICE
33
+ Requires-Dist: aiobotocore[boto3] (>=2.2.0)
33
34
  Requires-Dist: apache-airflow-providers-common-sql (>=1.3.1.dev0)
34
35
  Requires-Dist: apache-airflow (>=2.3.0.dev0)
35
36
  Requires-Dist: asgiref
@@ -87,7 +88,7 @@ Requires-Dist: apache-airflow-providers-ssh ; extra == 'ssh'
87
88
 
88
89
  Package ``apache-airflow-providers-amazon``
89
90
 
90
- Release: ``7.4.1rc1``
91
+ Release: ``8.0.0rc2``
91
92
 
92
93
 
93
94
  Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -100,7 +101,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
100
101
  are in ``airflow.providers.amazon`` python package.
101
102
 
102
103
  You can find package information and changelog for the provider
103
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/7.4.1/>`_.
104
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.0.0/>`_.
104
105
 
105
106
 
106
107
  Installation
@@ -129,6 +130,7 @@ PIP package Version required
129
130
  ``mypy-boto3-rds`` ``>=1.24.0``
130
131
  ``mypy-boto3-redshift-data`` ``>=1.24.0``
131
132
  ``mypy-boto3-appflow`` ``>=1.24.0``
133
+ ``aiobotocore[boto3]`` ``>=2.2.0``
132
134
  ======================================= ==================
133
135
 
134
136
  Cross provider package dependencies
@@ -185,6 +187,81 @@ Dependent package
185
187
  Changelog
186
188
  ---------
187
189
 
190
+ 8.0.0
191
+ ......
192
+
193
+ Breaking changes
194
+ ~~~~~~~~~~~~~~~~
195
+
196
+ .. warning::
197
+ In this version of the provider, deprecated GCS hook's parameter ``delegate_to`` is removed from the following operators: ``GCSToS3Operator``, ``GlacierToGCSOperator`` and ``GoogleApiToS3Operator``.
198
+ Impersonation can be achieved instead by utilizing the ``impersonation_chain`` param.
199
+
200
+ Removed deprecated parameter ``google_cloud_storage_conn_id`` from ``GCSToS3Operator``, ``gcp_conn_id`` should be used instead.
201
+
202
+ Removed deprecated parameter ``max_tries`` from the Athena & EMR hook & operators in favor of ``max_polling_attempts``.
203
+
204
+ Removed deprecated method ``waiter`` from emr hook in favor of the more generic ``airflow.providers.amazon.aws.utils.waiter.waiter``
205
+
206
+ Removed deprecated unused parameter ``cluster_identifier`` from Redshift Cluster's hook method ``get_cluster_snapshot_status``
207
+
208
+ Removed deprecated method ``find_processing_job_by_name`` from Sagemaker hook, use ``count_processing_jobs_by_name`` instead.
209
+
210
+ Removed deprecated module ``airflow.providers.amazon.aws.operators.aws_lambda`` in favor of ``airflow.providers.amazon.aws.operators.lambda_function``
211
+
212
+ Removed EcsOperator in favor of EcsRunTaskOperator.
213
+ EcsTaskLogFetcher and EcsProtocol should be imported from the hook.
214
+
215
+ Removed AwsLambdaInvokeFunctionOperator in favor of LambdaInvokeFunctionOperator.
216
+
217
+ Removed deprecated param ``await_result`` from RedshiftDataOperator in favor of ``wait_for_completion``.
218
+ Some methods from this operator should be imported from the hook instead.
219
+
220
+ Removed deprecated ``RedshiftSQLOperator`` in favor of the generic ``SQLExecuteQueryOperator``.
221
+ The parameter that was passed as ``redshift_conn_id`` needs to be changed to ``conn_id``, and the behavior should stay the same.
222
+
223
+ Removed deprecated method ``get_conn_uri`` from secrets manager in favor of ``get_conn_value``
224
+ Also removed deprecated method ``get_conn_uri`` from systems manager. ``deserialize_connection(...).get_uri()`` should be used instead.
225
+
226
+ Removed deprecated and unused param ``s3_conn_id`` from ``ImapAttachmentToS3Operator``, ``MongoToS3Operator`` and ``S3ToSFTPOperator``.
227
+
228
+ * ``remove delegate_to from GCP operators and hooks (#30748)``
229
+ * ``Remove deprecated code from Amazon provider (#30755)``
230
+
231
+ Features
232
+ ~~~~~~~~
233
+
234
+ * ``add a stop operator to emr serverless (#30720)``
235
+ * ``SqlToS3Operator - Add feature to partition SQL table (#30460)``
236
+ * ``New AWS sensor — DynamoDBValueSensor (#28338)``
237
+ * ``Add a "force" option to emr serverless stop/delete operator (#30757)``
238
+ * ``Add support for deferrable operators in AMPP (#30032)``
239
+
240
+ Bug Fixes
241
+ ~~~~~~~~~
242
+
243
+ * ``Fixed logging issue (#30703)``
244
+ * ``DynamoDBHook - waiter_path() to consider 'resource_type' or 'client_type' (#30595)``
245
+ * ``Add ability to override waiter delay in EcsRunTaskOperator (#30586)``
246
+ * ``Add support in AWS Batch Operator for multinode jobs (#29522)``
247
+ * ``AWS logs. Exit fast when 3 consecutive responses are returned from AWS Cloudwatch logs (#30756)``
248
+
249
+ Misc
250
+ ~~~~
251
+
252
+ * ``Remove @poke_mode_only from EmrStepSensor (#30774)``
253
+ * ``Organize Amazon providers docs index (#30541)``
254
+ * ``Remove duplicate param docstring in EksPodOperator (#30634)``
255
+ * ``Update AWS EMR Cluster Link to use the new dashboard (#30844)``
256
+
257
+ .. Below changes are excluded from the changelog. Move them to
258
+ appropriate section above if needed. Do not delete the lines(!):
259
+ * ``Decouple "job runner" from BaseJob ORM model (#30255)``
260
+ * ``Upgrade ruff to 0.0.262 (#30809)``
261
+ * ``fixes to system tests following obsolete cleanup (#30804)``
262
+ * ``restore fallback to empty connection behavior (#30806)``
263
+ * ``Prepare docs for adhoc release of providers (#30787)``
264
+
188
265
  7.4.1
189
266
  .....
190
267