apache-airflow-providers-amazon 8.12.0__py3-none-any.whl → 8.13.0rc1__py3-none-any.whl
This diff covers the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +3 -3
- airflow/providers/amazon/aws/auth_manager/avp/__init__.py +16 -0
- airflow/providers/amazon/aws/auth_manager/avp/entities.py +64 -0
- airflow/providers/amazon/aws/auth_manager/avp/facade.py +126 -0
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +47 -6
- airflow/providers/amazon/aws/auth_manager/constants.py +3 -1
- airflow/providers/amazon/aws/auth_manager/user.py +3 -0
- airflow/providers/amazon/aws/fs/s3.py +6 -6
- airflow/providers/amazon/aws/hooks/athena.py +10 -17
- airflow/providers/amazon/aws/hooks/ec2.py +10 -5
- airflow/providers/amazon/aws/hooks/emr.py +6 -13
- airflow/providers/amazon/aws/hooks/redshift_sql.py +41 -18
- airflow/providers/amazon/aws/hooks/s3.py +3 -3
- airflow/providers/amazon/aws/hooks/verified_permissions.py +44 -0
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +3 -3
- airflow/providers/amazon/aws/notifications/chime.py +1 -7
- airflow/providers/amazon/aws/notifications/sns.py +1 -8
- airflow/providers/amazon/aws/notifications/sqs.py +1 -8
- airflow/providers/amazon/aws/operators/eks.py +22 -8
- airflow/providers/amazon/aws/operators/redshift_cluster.py +29 -0
- airflow/providers/amazon/aws/sensors/batch.py +1 -1
- airflow/providers/amazon/aws/sensors/dynamodb.py +6 -5
- airflow/providers/amazon/aws/sensors/emr.py +1 -1
- airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +4 -4
- airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py +4 -1
- airflow/providers/amazon/aws/triggers/eks.py +30 -6
- airflow/providers/amazon/aws/triggers/emr.py +7 -3
- airflow/providers/amazon/aws/triggers/rds.py +5 -1
- airflow/providers/amazon/get_provider_info.py +28 -3
- {apache_airflow_providers_amazon-8.12.0.dist-info → apache_airflow_providers_amazon-8.13.0rc1.dist-info}/METADATA +10 -10
- {apache_airflow_providers_amazon-8.12.0.dist-info → apache_airflow_providers_amazon-8.13.0rc1.dist-info}/RECORD +34 -30
- {apache_airflow_providers_amazon-8.12.0.dist-info → apache_airflow_providers_amazon-8.13.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.12.0.dist-info → apache_airflow_providers_amazon-8.13.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/notifications/chime.py

@@ -20,18 +20,12 @@ from __future__ import annotations
 from functools import cached_property
 from typing import TYPE_CHECKING
 
-from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.providers.amazon.aws.hooks.chime import ChimeWebhookHook
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-try:
-    from airflow.notifications.basenotifier import BaseNotifier
-except ImportError:
-    raise AirflowOptionalProviderFeatureException(
-        "Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
-    )
+from airflow.notifications.basenotifier import BaseNotifier
 
 
 class ChimeNotifier(BaseNotifier):
airflow/providers/amazon/aws/notifications/sns.py

@@ -20,16 +20,9 @@ from __future__ import annotations
 from functools import cached_property
 from typing import Sequence
 
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.notifications.basenotifier import BaseNotifier
 from airflow.providers.amazon.aws.hooks.sns import SnsHook
 
-try:
-    from airflow.notifications.basenotifier import BaseNotifier
-except ImportError:
-    raise AirflowOptionalProviderFeatureException(
-        "Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
-    )
-
 
 class SnsNotifier(BaseNotifier):
     """
airflow/providers/amazon/aws/notifications/sqs.py

@@ -20,16 +20,9 @@ from __future__ import annotations
 from functools import cached_property
 from typing import Sequence
 
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.notifications.basenotifier import BaseNotifier
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 
-try:
-    from airflow.notifications.basenotifier import BaseNotifier
-except ImportError:
-    raise AirflowOptionalProviderFeatureException(
-        "Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
-    )
-
 
 class SqsNotifier(BaseNotifier):
     """
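The three notifier changes above drop the try/except around the BaseNotifier import because the provider now requires an Airflow version (>= 2.6.0) where airflow.notifications.basenotifier is always importable. A minimal usage sketch; the connection id and queue URL below are placeholders, not taken from this diff:

import pendulum
from airflow import DAG
from airflow.providers.amazon.aws.notifications.sqs import SqsNotifier

with DAG(
    dag_id="example_sqs_notifier",
    start_date=pendulum.datetime(2023, 12, 1, tz="UTC"),
    schedule=None,
    # Send a message to SQS whenever a DAG run fails.
    on_failure_callback=SqsNotifier(
        aws_conn_id="aws_default",  # placeholder connection id
        queue_url="https://sqs.us-east-1.amazonaws.com/123456789012/alerts",  # placeholder
        message_body="DAG run failed",
    ),
):
    pass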
airflow/providers/amazon/aws/operators/eks.py

@@ -21,6 +21,7 @@ import logging
 import warnings
 from ast import literal_eval
 from datetime import timedelta
+from functools import cached_property
 from typing import TYPE_CHECKING, Any, List, Sequence, cast
 
 from botocore.exceptions import ClientError, WaiterError
@@ -257,6 +258,20 @@ class EksCreateClusterOperator(BaseOperator):
             **kwargs,
         )
 
+    @cached_property
+    def hook(self) -> EksHook:
+        return EksHook(aws_conn_id=self.aws_conn_id, region_name=self.region)
+
+    @property
+    def eks_hook(self):
+        warnings.warn(
+            "`eks_hook` property is deprecated and will be removed in the future. "
+            "Please use `hook` property instead.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
+        return self.hook
+
     def execute(self, context: Context):
         if self.compute:
             if self.compute not in SUPPORTED_COMPUTE_VALUES:
@@ -271,8 +286,7 @@ class EksCreateClusterOperator(BaseOperator):
                         compute=FARGATE_FULL_NAME, requirement="fargate_pod_execution_role_arn"
                     )
                 )
-        self.eks_hook = EksHook(aws_conn_id=self.aws_conn_id, region_name=self.region)
-        self.eks_hook.create_cluster(
+        self.hook.create_cluster(
             name=self.cluster_name,
             roleArn=self.cluster_role_arn,
             resourcesVpcConfig=self.resources_vpc_config,
@@ -285,7 +299,7 @@ class EksCreateClusterOperator(BaseOperator):
             return None
 
         self.log.info("Waiting for EKS Cluster to provision. This will take some time.")
-        client = self.eks_hook.conn
+        client = self.hook.conn
 
         if self.deferrable:
             self.defer(
@@ -307,7 +321,7 @@ class EksCreateClusterOperator(BaseOperator):
                 )
             except (ClientError, WaiterError) as e:
                 self.log.error("Cluster failed to start and will be torn down.\n %s", e)
-                self.eks_hook.delete_cluster(name=self.cluster_name)
+                self.hook.delete_cluster(name=self.cluster_name)
                 client.get_waiter("cluster_deleted").wait(
                     name=self.cluster_name,
                     WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
@@ -337,7 +351,7 @@ class EksCreateClusterOperator(BaseOperator):
             raise AirflowException("Trigger error: event is None")
         elif event["status"] == "failed":
             self.log.error("Cluster failed to start and will be torn down.")
-            self.eks_hook.delete_cluster(name=self.cluster_name)
+            self.hook.delete_cluster(name=self.cluster_name)
             self.defer(
                 trigger=EksDeleteClusterTrigger(
                     cluster_name=self.cluster_name,
@@ -382,7 +396,7 @@ class EksCreateClusterOperator(BaseOperator):
                 method_name="execute_complete",
                 timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay),
             )
-
+        elif self.compute == "nodegroup":
             self.defer(
                 trigger=EksCreateNodegroupTrigger(
                     nodegroup_name=self.nodegroup_name,
@@ -400,9 +414,9 @@ class EksCreateClusterOperator(BaseOperator):
         if event is None:
             self.log.info("Trigger error: event is None")
             raise AirflowException("Trigger error: event is None")
-        elif event["status"] == "
+        elif event["status"] == "deleted":
             self.log.info("Cluster deleted")
-
+        raise AirflowException("Error creating cluster")
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
         resource = "fargate profile" if self.compute == "fargate" else self.compute
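For downstream code, the new cached `hook` property replaces `eks_hook`, which now only warns and forwards. A minimal sketch, assuming placeholder cluster arguments (the ARN and subnet ids below are illustrative, not from this diff):

from airflow.providers.amazon.aws.operators.eks import EksCreateClusterOperator

create_cluster = EksCreateClusterOperator(
    task_id="create_eks_cluster",
    cluster_name="my-cluster",  # placeholder
    cluster_role_arn="arn:aws:iam::123456789012:role/eks-cluster-role",  # placeholder
    resources_vpc_config={"subnetIds": ["subnet-aaa", "subnet-bbb"]},  # placeholder
    compute=None,  # only create the cluster, no nodegroup or Fargate profile
)

hook = create_cluster.hook        # new cached_property, builds the EksHook once
legacy = create_cluster.eks_hook  # still works, emits AirflowProviderDeprecationWarning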
airflow/providers/amazon/aws/operators/redshift_cluster.py

@@ -103,8 +103,37 @@ class RedshiftCreateClusterOperator(BaseOperator):
         "cluster_identifier",
         "cluster_type",
         "node_type",
+        "master_username",
+        "master_user_password",
+        "cluster_type",
+        "db_name",
         "number_of_nodes",
+        "cluster_security_groups",
         "vpc_security_group_ids",
+        "cluster_subnet_group_name",
+        "availability_zone",
+        "preferred_maintenance_window",
+        "cluster_parameter_group_name",
+        "automated_snapshot_retention_period",
+        "manual_snapshot_retention_period",
+        "port",
+        "cluster_version",
+        "allow_version_upgrade",
+        "publicly_accessible",
+        "encrypted",
+        "hsm_client_certificate_identifier",
+        "hsm_configuration_identifier",
+        "elastic_ip",
+        "tags",
+        "kms_key_id",
+        "enhanced_vpc_routing",
+        "additional_info",
+        "iam_roles",
+        "maintenance_track_name",
+        "snapshot_schedule_identifier",
+        "availability_zone_relocation",
+        "aqua_configuration_status",
+        "default_iam_role_arn",
     )
     ui_color = "#eeaa11"
     ui_fgcolor = "#ffffff"
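With the expanded template_fields above, most constructor arguments of RedshiftCreateClusterOperator can now be rendered from Jinja. A small sketch assuming placeholder credentials kept in Airflow Variables (names are illustrative):

from airflow.providers.amazon.aws.operators.redshift_cluster import RedshiftCreateClusterOperator

create_cluster = RedshiftCreateClusterOperator(
    task_id="create_redshift_cluster",
    cluster_identifier="analytics-{{ ds_nodash }}",
    node_type="dc2.large",
    master_username="{{ var.value.redshift_admin_user }}",            # templated since this change
    master_user_password="{{ var.value.redshift_admin_password }}",  # templated since this change
    cluster_type="multi-node",
    number_of_nodes=2,
)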
airflow/providers/amazon/aws/sensors/batch.py

@@ -125,7 +125,7 @@ class BatchSensor(BaseSensorOperator):
         if event["status"] != "success":
             message = f"Error while running job: {event}"
             # TODO: remove this if-else block when min_airflow_version is set to higher than the version that
-            # changed in https://github.com/apache/airflow/pull/33424 is released
+            # changed in https://github.com/apache/airflow/pull/33424 is released (2.7.1)
             if self.soft_fail:
                 raise AirflowSkipException(message)
             raise AirflowException(message)
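Context for the comment tweak above: until the provider's minimum Airflow version includes the change from apache/airflow#33424 (first released in Airflow 2.7.1), sensors map failures to a skip themselves when soft_fail is set. A minimal sketch of that pattern:

from airflow.exceptions import AirflowException, AirflowSkipException

def fail_or_skip(message: str, soft_fail: bool) -> None:
    # With soft_fail=True the task is skipped instead of failed.
    if soft_fail:
        raise AirflowSkipException(message)
    raise AirflowException(message)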
airflow/providers/amazon/aws/sensors/dynamodb.py

@@ -17,7 +17,7 @@
 from __future__ import annotations
 
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any, Iterable, Sequence
 
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.sensors.base import BaseSensorOperator
@@ -61,7 +61,7 @@ class DynamoDBValueSensor(BaseSensorOperator):
         partition_key_name: str,
         partition_key_value: str,
         attribute_name: str,
-        attribute_value: str,
+        attribute_value: str | Iterable[str],
         sort_key_name: str | None = None,
         sort_key_value: str | None = None,
         aws_conn_id: str | None = DynamoDBHook.default_conn_name,
@@ -99,12 +99,13 @@ class DynamoDBValueSensor(BaseSensorOperator):
         self.log.info("Key: %s", key)
         response = table.get_item(Key=key)
         try:
+            item_attribute_value = response["Item"][self.attribute_name]
             self.log.info("Response: %s", response)
             self.log.info("Want: %s = %s", self.attribute_name, self.attribute_value)
-            self.log.info(
-
+            self.log.info("Got: {response['Item'][self.attribute_name]} = %s", item_attribute_value)
+            return item_attribute_value in (
+                [self.attribute_value] if isinstance(self.attribute_value, str) else self.attribute_value
             )
-            return response["Item"][self.attribute_name] == self.attribute_value
         except KeyError:
             return False
 
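With attribute_value now typed str | Iterable[str], the sensor succeeds when the stored attribute matches any of the supplied values. A usage sketch with placeholder table and key names:

from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor

wait_for_status = DynamoDBValueSensor(
    task_id="wait_for_order_status",
    table_name="orders",            # placeholder
    partition_key_name="order_id",  # placeholder
    partition_key_value="12345",    # placeholder
    attribute_name="status",
    attribute_value=["SHIPPED", "DELIVERED"],  # a list is accepted as of this change
)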
airflow/providers/amazon/aws/sensors/emr.py

@@ -343,7 +343,7 @@ class EmrContainerSensor(BaseSensorOperator):
                 raise AirflowSkipException(message)
             raise AirflowException(message)
         else:
-            self.log.info(
+            self.log.info("Job completed.")
 
 
 class EmrNotebookExecutionSensor(EmrBaseSensor):
airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py

@@ -90,8 +90,8 @@ class AzureBlobStorageToS3Operator(BaseOperator):
         dest_s3_extra_args: dict | None = None,
         replace: bool = False,
         s3_acl_policy: str | None = None,
-        wasb_extra_args: dict = {},
-        s3_extra_args: dict = {},
+        wasb_extra_args: dict | None = None,
+        s3_extra_args: dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -106,8 +106,8 @@ class AzureBlobStorageToS3Operator(BaseOperator):
         self.dest_s3_extra_args = dest_s3_extra_args or {}
         self.replace = replace
         self.s3_acl_policy = s3_acl_policy
-        self.wasb_extra_args = wasb_extra_args
-        self.s3_extra_args = s3_extra_args
+        self.wasb_extra_args = wasb_extra_args or {}
+        self.s3_extra_args = s3_extra_args or {}
 
     def execute(self, context: Context) -> list[str]:
         # list all files in the Azure Blob Storage container
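The change above replaces mutable default arguments with the None-plus-fallback idiom. A generic illustration of the pitfall it avoids (not provider code):

def append_call(history: list = []):  # buggy: the default list is created once and shared
    history.append("call")
    return history

def append_call_fixed(history: list | None = None):
    history = history or []  # each call gets its own list
    history.append("call")
    return history

print(append_call())        # ['call']
print(append_call())        # ['call', 'call']  <- state leaked between calls
print(append_call_fixed())  # ['call']
print(append_call_fixed())  # ['call']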
airflow/providers/amazon/aws/transfers/glacier_to_gcs.py

@@ -98,7 +98,7 @@ class GlacierToGCSOperator(BaseOperator):
             # Read the file content in chunks using StreamingBody
             # https://botocore.amazonaws.com/v1/documentation/api/latest/reference/response.html
             stream = glacier_data["body"]
-            for chunk in stream.
+            for chunk in stream.iter_chunks(chunk_size=self.chunk_size):
                 temp_file.write(chunk)
                 temp_file.flush()
             gcs_hook.upload(
airflow/providers/amazon/aws/transfers/redshift_to_s3.py

@@ -18,6 +18,7 @@
 """Transfers data from AWS Redshift into a S3 Bucket."""
 from __future__ import annotations
 
+import re
 from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
 
 from airflow.exceptions import AirflowException
@@ -141,8 +142,10 @@ class RedshiftToS3Operator(BaseOperator):
     def _build_unload_query(
         self, credentials_block: str, select_query: str, s3_key: str, unload_options: str
     ) -> str:
+        # Un-escape already escaped queries
+        select_query = re.sub(r"''(.+)''", r"'\1'", select_query)
         return f"""
-                    UNLOAD (
+                    UNLOAD ($${select_query}$$)
                     TO 's3://{self.s3_bucket}/{s3_key}'
                     credentials
                     '{credentials_block}'
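A small sketch of the un-escaping step added above: queries whose quotes were already doubled for the previous single-quote wrapping are normalized before being embedded in the dollar-quoted UNLOAD statement (the bucket and prefix below are placeholders):

import re

select_query = "SELECT * FROM orders WHERE status = ''shipped''"
select_query = re.sub(r"''(.+)''", r"'\1'", select_query)
print(select_query)
# SELECT * FROM orders WHERE status = 'shipped'

unload = f"UNLOAD ($${select_query}$$) TO 's3://my-bucket/my-prefix/'"
print(unload)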
airflow/providers/amazon/aws/triggers/eks.py

@@ -19,7 +19,9 @@ from __future__ import annotations
 import warnings
 from typing import TYPE_CHECKING, Any
 
-from airflow.exceptions import AirflowProviderDeprecationWarning
+from botocore.exceptions import ClientError
+
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.eks import EksHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
@@ -68,6 +70,25 @@ class EksCreateClusterTrigger(AwsBaseWaiterTrigger):
     def hook(self) -> AwsGenericHook:
         return EksHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
+    async def run(self):
+        async with self.hook().async_conn as client:
+            waiter = client.get_waiter(self.waiter_name)
+            try:
+                await async_wait(
+                    waiter,
+                    self.waiter_delay,
+                    self.attempts,
+                    self.waiter_args,
+                    self.failure_message,
+                    self.status_message,
+                    self.status_queries,
+                )
+            except AirflowException as exception:
+                self.log.error("Error creating cluster: %s", exception)
+                yield TriggerEvent({"status": "failed"})
+            else:
+                yield TriggerEvent({"status": "success"})
+
 
 class EksDeleteClusterTrigger(AwsBaseWaiterTrigger):
     """
@@ -120,12 +141,18 @@ class EksDeleteClusterTrigger(AwsBaseWaiterTrigger):
         return EksHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
     async def run(self):
-        async with self.hook.async_conn as client:
+        async with self.hook().async_conn as client:
             waiter = client.get_waiter("cluster_deleted")
             if self.force_delete_compute:
                 await self.delete_any_nodegroups(client=client)
                 await self.delete_any_fargate_profiles(client=client)
+            try:
                 await client.delete_cluster(name=self.cluster_name)
+            except ClientError as ex:
+                if ex.response.get("Error").get("Code") == "ResourceNotFoundException":
+                    pass
+                else:
+                    raise
             await async_wait(
                 waiter=waiter,
                 waiter_delay=int(self.waiter_delay),
@@ -148,10 +175,7 @@ class EksDeleteClusterTrigger(AwsBaseWaiterTrigger):
         nodegroups = await client.list_nodegroups(clusterName=self.cluster_name)
         if nodegroups.get("nodegroups", None):
             self.log.info("Deleting nodegroups")
-
-            waiter = self.hook.get_waiter(  # type: ignore[attr-defined]
-                "all_nodegroups_deleted", deferrable=True, client=client
-            )
+            waiter = self.hook().get_waiter("all_nodegroups_deleted", deferrable=True, client=client)
             for group in nodegroups["nodegroups"]:
                 await client.delete_nodegroup(clusterName=self.cluster_name, nodegroupName=group)
             await async_wait(
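The ResourceNotFoundException handling added above follows the standard botocore pattern of inspecting the error code on the ClientError and ignoring only the already-deleted case. A standalone sketch (the client argument is assumed to be a boto3 EKS client):

from botocore.exceptions import ClientError

def delete_cluster_if_exists(eks_client, cluster_name: str) -> None:
    try:
        eks_client.delete_cluster(name=cluster_name)
    except ClientError as ex:
        if ex.response.get("Error", {}).get("Code") == "ResourceNotFoundException":
            pass  # cluster already gone; treat as success
        else:
            raise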
airflow/providers/amazon/aws/triggers/emr.py

@@ -444,8 +444,7 @@ class EmrServerlessCancelJobsTrigger(AwsBaseWaiterTrigger):
         waiter_delay: int,
         waiter_max_attempts: int,
     ) -> None:
-
-        states = list(self.hook_instance.JOB_INTERMEDIATE_STATES.union({"CANCELLING"}))
+        states = list(EmrServerlessHook.JOB_INTERMEDIATE_STATES.union({"CANCELLING"}))
         super().__init__(
             serialized_fields={"application_id": application_id},
             waiter_name="no_job_running",
@@ -461,4 +460,9 @@ class EmrServerlessCancelJobsTrigger(AwsBaseWaiterTrigger):
         )
 
     def hook(self) -> AwsGenericHook:
-        return self.hook_instance
+        return EmrServerlessHook(self.aws_conn_id)
+
+    @property
+    def hook_instance(self) -> AwsGenericHook:
+        """This property is added for backward compatibility."""
+        return self.hook()
airflow/providers/amazon/aws/triggers/rds.py

@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 import warnings
+from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
@@ -83,8 +84,11 @@ class RdsDbInstanceTrigger(BaseTrigger):
             },
         )
 
+    @cached_property
+    def hook(self) -> RdsHook:
+        return RdsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
     async def run(self):
-        self.hook = RdsHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
         async with self.hook.async_conn as client:
             waiter = client.get_waiter(self.waiter_name)
             await async_wait(
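The cached_property hook introduced above (mirroring the EKS operator change) builds the hook lazily on first access and reuses it afterwards. A minimal standalone sketch of the pattern, with a stand-in object instead of RdsHook:

from functools import cached_property

class ExampleTrigger:
    def __init__(self, aws_conn_id: str = "aws_default"):
        self.aws_conn_id = aws_conn_id

    @cached_property
    def hook(self):
        print("constructing hook")  # runs only once per instance
        return object()             # stand-in for RdsHook(aws_conn_id=..., region_name=...)

trigger = ExampleTrigger()
assert trigger.hook is trigger.hook  # same cached object on every access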
airflow/providers/amazon/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "suspended": False,
-        "source-date-epoch":
+        "source-date-epoch": 1701983347,
         "versions": [
+            "8.13.0",
             "8.12.0",
             "8.11.0",
             "8.10.0",
@@ -79,7 +80,7 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.
+            "apache-airflow>=2.6.0",
             "apache-airflow-providers-common-sql>=1.3.1",
             "apache-airflow-providers-http",
             "boto3>=1.28.0",
@@ -351,6 +352,12 @@ def get_provider_info():
                 "logo": "/integration-logos/aws/AWS-Glue-DataBrew_64.png",
                 "tags": ["aws"],
             },
+            {
+                "integration-name": "Amazon Verified Permissions",
+                "external-doc-url": "https://aws.amazon.com/verified-permissions/",
+                "logo": "/integration-logos/aws/Amazon-Verified-Permissions.png",
+                "tags": ["aws"],
+            },
         ],
         "operators": [
             {
@@ -691,6 +698,10 @@ def get_provider_info():
                 "integration-name": "AWS Glue DataBrew",
                 "python-modules": ["airflow.providers.amazon.aws.hooks.glue_databrew"],
             },
+            {
+                "integration-name": "Amazon Verified Permissions",
+                "python-modules": ["airflow.providers.amazon.aws.hooks.verified_permissions"],
+            },
         ],
         "triggers": [
             {
@@ -928,7 +939,7 @@ def get_provider_info():
             {"name": "pandas", "dependencies": ["pandas>=0.17.1"]},
             {"name": "aiobotocore", "dependencies": ["aiobotocore[boto3]>=2.5.3"]},
             {"name": "cncf.kubernetes", "dependencies": ["apache-airflow-providers-cncf-kubernetes>=7.2.0"]},
-            {"name": "s3fs", "dependencies": ["s3fs>=2023.
+            {"name": "s3fs", "dependencies": ["s3fs>=2023.10.0"]},
             {"name": "python3-saml", "dependencies": ["python3-saml>=1.16.0"]},
         ],
         "config": {
@@ -1057,6 +1068,13 @@ def get_provider_info():
                     "example": "True",
                     "default": "False",
                 },
+                "conn_id": {
+                    "description": "The Airflow connection (i.e. credentials) used by the AWS auth manager to make API calls to AWS\nIdentity Center and Amazon Verified Permissions.\n",
+                    "version_added": "8.12.0",
+                    "type": "string",
+                    "example": "aws_default",
+                    "default": "aws_default",
+                },
                 "saml_metadata_url": {
                     "description": "SAML metadata XML file provided by AWS Identity Center.\nThis URL can be found in the AWS Identity Center console. Required.\n",
                     "version_added": "8.12.0",
@@ -1064,6 +1082,13 @@ def get_provider_info():
                     "example": "https://portal.sso.<region>.amazonaws.com/saml/metadata/XXXXXXXXXX",
                     "default": None,
                 },
+                "avp_policy_store_id": {
+                    "description": "Amazon Verified Permissions' policy store ID where all the policies defining user permissions\nin Airflow are stored. Required.\n",
+                    "version_added": "8.12.0",
+                    "type": "string",
+                    "example": None,
+                    "default": None,
+                },
             },
         },
     },
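The two auth manager options added above can be read like any other provider configuration option; the section name aws_auth_manager used below is an assumption based on the option descriptions and is not shown in this diff:

from airflow.configuration import conf

# Options belong to the AWS auth manager configuration introduced in 8.12.0/8.13.0.
conn_id = conf.get("aws_auth_manager", "conn_id", fallback="aws_default")  # assumed section name
policy_store_id = conf.get("aws_auth_manager", "avp_policy_store_id", fallback=None)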
{apache_airflow_providers_amazon-8.12.0.dist-info → apache_airflow_providers_amazon-8.13.0rc1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.12.0
+Version: 8.13.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1.dev0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.
+Requires-Dist: apache-airflow>=2.6.0.dev0
 Requires-Dist: asgiref
 Requires-Dist: boto3>=1.28.0
 Requires-Dist: botocore>=1.31.0
@@ -45,12 +45,12 @@ Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=0.17.1 ; extra == "pandas"
 Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml"
-Requires-Dist: s3fs>=2023.
+Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.12.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.12.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.13.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.13.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -117,7 +117,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.12.0``
+Release: ``8.13.0.rc1``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -130,7 +130,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.12.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.13.0/>`_.
 
 Installation
 ------------
@@ -147,7 +147,7 @@ Requirements
 =======================================  ==================
 PIP package                              Version required
 =======================================  ==================
-``apache-airflow``                       ``>=2.
+``apache-airflow``                       ``>=2.6.0``
 ``apache-airflow-providers-common-sql``  ``>=1.3.1``
 ``apache-airflow-providers-http``
 ``boto3``                                ``>=1.28.0``
@@ -192,4 +192,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.12.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.13.0/changelog.html>`_.