apache-airflow-providers-amazon 9.14.0__py3-none-any.whl → 9.18.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +3 -3
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +106 -5
- airflow/providers/amazon/aws/auth_manager/routes/login.py +7 -1
- airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py +5 -1
- airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +1 -1
- airflow/providers/amazon/aws/hooks/athena.py +6 -2
- airflow/providers/amazon/aws/hooks/athena_sql.py +2 -2
- airflow/providers/amazon/aws/hooks/base_aws.py +2 -2
- airflow/providers/amazon/aws/hooks/batch_client.py +4 -6
- airflow/providers/amazon/aws/hooks/batch_waiters.py +0 -1
- airflow/providers/amazon/aws/hooks/chime.py +1 -1
- airflow/providers/amazon/aws/hooks/datasync.py +3 -3
- airflow/providers/amazon/aws/hooks/firehose.py +56 -0
- airflow/providers/amazon/aws/hooks/glue.py +7 -1
- airflow/providers/amazon/aws/hooks/kinesis.py +31 -13
- airflow/providers/amazon/aws/hooks/mwaa.py +38 -7
- airflow/providers/amazon/aws/hooks/redshift_sql.py +20 -6
- airflow/providers/amazon/aws/hooks/s3.py +41 -11
- airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +1 -1
- airflow/providers/amazon/aws/hooks/ses.py +76 -10
- airflow/providers/amazon/aws/hooks/sns.py +74 -18
- airflow/providers/amazon/aws/hooks/sqs.py +64 -11
- airflow/providers/amazon/aws/hooks/ssm.py +34 -6
- airflow/providers/amazon/aws/hooks/step_function.py +1 -1
- airflow/providers/amazon/aws/links/base_aws.py +1 -1
- airflow/providers/amazon/aws/notifications/ses.py +139 -0
- airflow/providers/amazon/aws/notifications/sns.py +16 -1
- airflow/providers/amazon/aws/notifications/sqs.py +17 -1
- airflow/providers/amazon/aws/operators/base_aws.py +2 -2
- airflow/providers/amazon/aws/operators/bedrock.py +2 -0
- airflow/providers/amazon/aws/operators/cloud_formation.py +2 -2
- airflow/providers/amazon/aws/operators/datasync.py +2 -1
- airflow/providers/amazon/aws/operators/emr.py +44 -33
- airflow/providers/amazon/aws/operators/mwaa.py +12 -3
- airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -1
- airflow/providers/amazon/aws/operators/ssm.py +122 -17
- airflow/providers/amazon/aws/secrets/secrets_manager.py +3 -4
- airflow/providers/amazon/aws/sensors/base_aws.py +2 -2
- airflow/providers/amazon/aws/sensors/mwaa.py +14 -1
- airflow/providers/amazon/aws/sensors/s3.py +27 -13
- airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -1
- airflow/providers/amazon/aws/sensors/ssm.py +33 -17
- airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +3 -3
- airflow/providers/amazon/aws/transfers/base.py +5 -5
- airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +4 -4
- airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/gcs_to_s3.py +48 -5
- airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
- airflow/providers/amazon/aws/transfers/google_api_to_s3.py +2 -5
- airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -1
- airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py +6 -6
- airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -1
- airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
- airflow/providers/amazon/aws/transfers/s3_to_redshift.py +6 -6
- airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
- airflow/providers/amazon/aws/transfers/s3_to_sql.py +1 -1
- airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
- airflow/providers/amazon/aws/transfers/sql_to_s3.py +4 -5
- airflow/providers/amazon/aws/triggers/bedrock.py +1 -1
- airflow/providers/amazon/aws/triggers/s3.py +29 -2
- airflow/providers/amazon/aws/triggers/ssm.py +17 -1
- airflow/providers/amazon/aws/utils/connection_wrapper.py +2 -5
- airflow/providers/amazon/aws/utils/mixins.py +1 -1
- airflow/providers/amazon/aws/utils/waiter.py +2 -2
- airflow/providers/amazon/aws/waiters/emr.json +6 -6
- airflow/providers/amazon/get_provider_info.py +19 -1
- airflow/providers/amazon/version_compat.py +19 -16
- {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/METADATA +25 -19
- {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/RECORD +79 -76
- apache_airflow_providers_amazon-9.18.0rc2.dist-info/licenses/NOTICE +5 -0
- {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/entry_points.txt +0 -0
- {airflow/providers/amazon → apache_airflow_providers_amazon-9.18.0rc2.dist-info/licenses}/LICENSE +0 -0
airflow/providers/amazon/aws/triggers/s3.py

@@ -41,6 +41,11 @@ class S3KeyTrigger(BaseTrigger):
         Unix wildcard pattern
     :param aws_conn_id: reference to the s3 connection
     :param use_regex: whether to use regex to check bucket
+    :param metadata_keys: List of head_object attributes to gather and send to ``check_fn``.
+        Acceptable values: Any top level attribute returned by s3.head_object. Specify * to return
+        all available attributes.
+        Default value: "Size".
+        If the requested attribute is not found, the key is still included and the value is None.
     :param hook_params: params for hook its optional
     """
 
@@ -56,6 +61,7 @@ class S3KeyTrigger(BaseTrigger):
         region_name: str | None = None,
         verify: bool | str | None = None,
         botocore_config: dict | None = None,
+        metadata_keys: list[str] | None = None,
         **hook_params: Any,
     ):
         super().__init__()
@@ -70,6 +76,7 @@ class S3KeyTrigger(BaseTrigger):
         self.region_name = region_name
         self.verify = verify
         self.botocore_config = botocore_config
+        self.metadata_keys = metadata_keys if metadata_keys else ["Size", "Key"]
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize S3KeyTrigger arguments and classpath."""
@@ -87,6 +94,7 @@ class S3KeyTrigger(BaseTrigger):
                 "region_name": self.region_name,
                 "verify": self.verify,
                 "botocore_config": self.botocore_config,
+                "metadata_keys": self.metadata_keys,
             },
         )
 
@@ -108,11 +116,30 @@ class S3KeyTrigger(BaseTrigger):
                         client, self.bucket_name, self.bucket_key, self.wildcard_match, self.use_regex
                     ):
                         if self.should_check_fn:
-                            s3_objects = await self.hook.get_files_async(
+                            raw_objects = await self.hook.get_files_async(
                                 client, self.bucket_name, self.bucket_key, self.wildcard_match
                             )
+                            files = []
+                            for f in raw_objects:
+                                metadata = {}
+                                obj = await self.hook.get_head_object_async(
+                                    client=client, key=f, bucket_name=self.bucket_name
+                                )
+                                if obj is None:
+                                    return
+
+                                if "*" in self.metadata_keys:
+                                    metadata = obj
+                                else:
+                                    for mk in self.metadata_keys:
+                                        if mk == "Size":
+                                            metadata[mk] = obj.get("ContentLength")
+                                        else:
+                                            metadata[mk] = obj.get(mk, None)
+                                metadata["Key"] = f
+                                files.append(metadata)
                             await asyncio.sleep(self.poke_interval)
-                            yield TriggerEvent({"status": "running", "files": s3_objects})
+                            yield TriggerEvent({"status": "running", "files": files})
                         else:
                             yield TriggerEvent({"status": "success"})
                         return
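
For illustration only, a minimal sketch of how the richer ``files`` payload could be consumed by a ``check_fn`` (as referenced in the docstring above). The bucket and key values, and the constructor arguments other than ``metadata_keys``, are placeholders inferred from the surrounding trigger code, not guaranteed by this diff:

    def check_fn(files: list[dict]) -> bool:
        # Each dict carries the requested metadata_keys plus "Key";
        # "Size" is mapped from head_object's ContentLength and may be None.
        return all((f.get("Size") or 0) > 0 for f in files)

    trigger = S3KeyTrigger(
        bucket_name="example-bucket",            # placeholder
        bucket_key="incoming/*.csv",             # placeholder wildcard key
        wildcard_match=True,
        should_check_fn=True,
        metadata_keys=["Size", "LastModified"],  # forwarded to the event payload
    )
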
airflow/providers/amazon/aws/triggers/ssm.py

@@ -36,6 +36,11 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
     :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
     :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
     :param aws_conn_id: The Airflow connection used for AWS credentials.
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
     def __init__(
@@ -45,6 +50,9 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
         waiter_delay: int = 120,
         waiter_max_attempts: int = 75,
         aws_conn_id: str | None = None,
+        region_name: str | None = None,
+        verify: bool | str | None = None,
+        botocore_config: dict | None = None,
     ) -> None:
         super().__init__(
             serialized_fields={"command_id": command_id},
@@ -58,11 +66,19 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
             waiter_delay=waiter_delay,
             waiter_max_attempts=waiter_max_attempts,
             aws_conn_id=aws_conn_id,
+            region_name=region_name,
+            verify=verify,
+            botocore_config=botocore_config,
         )
         self.command_id = command_id
 
     def hook(self) -> AwsGenericHook:
-        return SsmHook(aws_conn_id=self.aws_conn_id)
+        return SsmHook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = self.hook()
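
A hedged usage sketch for the new connection-level arguments; the command id and the option values below are placeholders, not part of this diff:

    trigger = SsmRunCommandTrigger(
        command_id="12345678-aaaa-bbbb-cccc-1234567890ab",  # placeholder SSM command id
        aws_conn_id="aws_default",
        region_name="eu-west-1",
        verify=True,                                        # or a path to a CA bundle
        botocore_config={"retries": {"mode": "adaptive"}},  # plain dict, matching the type hint
    )

All three options are forwarded to ``SsmHook`` (note that ``botocore_config`` arrives there as ``config``), so the deferred waiter now polls with the same connection settings as the hook.
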
airflow/providers/amazon/aws/utils/connection_wrapper.py

@@ -28,14 +28,11 @@ from botocore.config import Config
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils import trim_none_values
+from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk import Connection
-    except ImportError:
-        from airflow.models.connection import Connection  # type: ignore[assignment]
+    from airflow.providers.common.compat.sdk import Connection
 
 
 @dataclass
airflow/providers/amazon/aws/utils/mixins.py

@@ -158,4 +158,4 @@ def aws_template_fields(*template_fields: str) -> tuple[str, ...]:
            f"{', '.join(map(repr, template_fields))}."
        )
        raise TypeError(msg)
-    return tuple(sorted(
+    return tuple(sorted({"aws_conn_id", "region_name", "verify"} | set(template_fields)))
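
For a concrete sense of the helper's behaviour (the field names passed in below are examples only): the base AWS fields are merged with whatever is provided and returned sorted:

    aws_template_fields("s3_bucket", "s3_key")
    # -> ('aws_conn_id', 'region_name', 's3_bucket', 's3_key', 'verify')
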
airflow/providers/amazon/aws/utils/waiter.py

@@ -51,8 +51,8 @@ def waiter(
     :param desired_state: Wait until the getter returns this value
     :param failure_states: A set of states which indicate failure and should throw an
         exception if any are reached before the desired_state
-    :param object_type: Used for the reporting string. What are you waiting for? (application, job, etc)
-    :param action: Used for the reporting string. What action are you waiting for? (created, deleted, etc)
+    :param object_type: Used for the reporting string. What are you waiting for? (application, job, etc.)
+    :param action: Used for the reporting string. What action are you waiting for? (created, deleted, etc.)
     :param countdown: Number of seconds the waiter should wait for the desired state before timing out.
         Defaults to 25 * 60 seconds. None = infinite.
     :param check_interval_seconds: Number of seconds waiter should wait before attempting
airflow/providers/amazon/aws/waiters/emr.json

@@ -8,19 +8,19 @@
             "acceptors": [
                 {
                     "matcher": "path",
-                    "argument": "
+                    "argument": "NotebookExecution.Status",
                     "expected": "RUNNING",
                     "state": "success"
                 },
                 {
                     "matcher": "path",
-                    "argument": "
+                    "argument": "NotebookExecution.Status",
                     "expected": "FINISHED",
                     "state": "success"
                 },
                 {
                     "matcher": "path",
-                    "argument": "
+                    "argument": "NotebookExecution.Status",
                     "expected": "FAILED",
                     "state": "failure"
                 }
@@ -33,19 +33,19 @@
             "acceptors": [
                 {
                     "matcher": "path",
-                    "argument": "
+                    "argument": "NotebookExecution.Status",
                     "expected": "STOPPED",
                     "state": "success"
                 },
                 {
                     "matcher": "path",
-                    "argument": "
+                    "argument": "NotebookExecution.Status",
                     "expected": "FINISHED",
                     "state": "success"
                 },
                 {
                     "matcher": "path",
-                    "argument": "
+                    "argument": "NotebookExecution.Status",
                     "expected": "FAILED",
                     "state": "failure"
                 }
|
@@ -340,6 +340,20 @@ def get_provider_info():
|
|
|
340
340
|
"how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/neptune.rst"],
|
|
341
341
|
"tags": ["aws"],
|
|
342
342
|
},
|
|
343
|
+
{
|
|
344
|
+
"integration-name": "Amazon Kinesis Data Stream",
|
|
345
|
+
"external-doc-url": "https://aws.amazon.com/kinesis/",
|
|
346
|
+
"logo": "/docs/integration-logos/Amazon-Kinesis-Data-Firehose_light-bg@4x.png",
|
|
347
|
+
"how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst"],
|
|
348
|
+
"tags": ["aws"],
|
|
349
|
+
},
|
|
350
|
+
{
|
|
351
|
+
"integration-name": "Amazon Managed Workflows for Apache Airflow (MWAA)",
|
|
352
|
+
"external-doc-url": "https://aws.amazon.com/managed-workflows-for-apache-airflow/",
|
|
353
|
+
"logo": "/docs/integration-logos/Amazon-MWAA.png",
|
|
354
|
+
"how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/mwaa.rst"],
|
|
355
|
+
"tags": ["aws"],
|
|
356
|
+
},
|
|
343
357
|
],
|
|
344
358
|
"operators": [
|
|
345
359
|
{
|
|
@@ -695,9 +709,13 @@ def get_provider_info():
|
|
|
695
709
|
],
|
|
696
710
|
},
|
|
697
711
|
{
|
|
698
|
-
"integration-name": "Amazon Kinesis Data
|
|
712
|
+
"integration-name": "Amazon Kinesis Data Stream",
|
|
699
713
|
"python-modules": ["airflow.providers.amazon.aws.hooks.kinesis"],
|
|
700
714
|
},
|
|
715
|
+
{
|
|
716
|
+
"integration-name": "Amazon Kinesis Data Firehose",
|
|
717
|
+
"python-modules": ["airflow.providers.amazon.aws.hooks.firehose"],
|
|
718
|
+
},
|
|
701
719
|
{
|
|
702
720
|
"integration-name": "AWS Lambda",
|
|
703
721
|
"python-modules": ["airflow.providers.amazon.aws.hooks.lambda_function"],
|
|
airflow/providers/amazon/version_compat.py

@@ -20,9 +20,13 @@
 # ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
 # THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
 #
+
 from __future__ import annotations
 
+import functools
+
 
+@functools.cache
 def get_base_airflow_version_tuple() -> tuple[int, int, int]:
     from packaging.version import Version
 
@@ -34,26 +38,25 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
+AIRFLOW_V_3_1_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 1)
+
+try:
+    from airflow.sdk.definitions._internal.types import NOTSET, ArgNotSet
+except ImportError:
+    from airflow.utils.types import NOTSET, ArgNotSet  # type: ignore[attr-defined,no-redef]
+try:
+    from airflow.sdk.definitions._internal.types import is_arg_set
+except ImportError:
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseHook
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+    def is_arg_set(value):  # type: ignore[misc,no-redef]
+        return value is not NOTSET
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
-    from airflow.sdk.execution_time.xcom import XCom
-else:
-    from airflow.models import BaseOperator, XCom
-    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
-    "BaseHook",
-    "BaseOperator",
-    "BaseOperatorLink",
-    "BaseSensorOperator",
-    "XCom",
+    "AIRFLOW_V_3_1_1_PLUS",
+    "NOTSET",
+    "ArgNotSet",
+    "is_arg_set",
 ]
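
A short sketch of the sentinel pattern these shims support; ``example_update`` and its argument are hypothetical, only the imports and the ``is_arg_set``/``NOTSET`` semantics come from the code above:

    from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet, is_arg_set

    def example_update(timeout: int | None | ArgNotSet = NOTSET) -> None:
        # NOTSET distinguishes "argument not passed" from an explicit None.
        if is_arg_set(timeout):
            print(f"caller supplied timeout={timeout!r} (possibly None)")
        else:
            print("caller did not pass timeout; keep the existing value")

    example_update()              # not passed
    example_update(timeout=None)  # explicitly None
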
{apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/METADATA

@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.14.0
+Version: 9.18.0rc2
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,15 +15,16 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-
-
-Requires-Dist: apache-airflow
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.0rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: boto3>=1.37.2
 Requires-Dist: botocore>=1.37.2
@@ -34,19 +36,21 @@ Requires-Dist: asgiref>=2.3.0
 Requires-Dist: PyAthena>=3.10.0
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: sagemaker-studio>=1.0.9
+Requires-Dist: pydynamodb>=0.7.5; python_version >= '3.13'
+Requires-Dist: sqlean.py>=3.47.0; python_version >= '3.13'
 Requires-Dist: marshmallow>=3
 Requires-Dist: aiobotocore[boto3]>=2.21.1 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.
-Requires-Dist: apache-airflow-providers-common-messaging>=2.0.
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc1 ; extra == "cncf-kubernetes"
+Requires-Dist: apache-airflow-providers-common-messaging>=2.0.0rc1 ; extra == "common-messaging"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
-Requires-Dist: apache-airflow-providers-fab>=2.2.
+Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
 Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml" and ( python_version < '3.13')
 Requires-Dist: xmlsec>=1.3.14 ; extra == "python3-saml" and ( python_version < '3.13')
 Requires-Dist: lxml>=6.0.0 ; extra == "python3-saml" and ( python_version < '3.13')
@@ -55,8 +59,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.18.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.18.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -104,7 +108,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.14.0``
+Release: ``9.18.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -117,7 +121,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.18.0/>`_.
 
 Installation
 ------------
@@ -131,11 +135,11 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
 Requirements
 ------------
 
-==========================================
+========================================== ======================================
 PIP package                                Version required
-==========================================
-``apache-airflow``                         ``>=2.
-``apache-airflow-providers-common-compat`` ``>=1.
+========================================== ======================================
+``apache-airflow``                         ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
 ``apache-airflow-providers-http``
 ``boto3``                                  ``>=1.37.2``
@@ -148,8 +152,10 @@ PIP package                                Version required
 ``PyAthena``                               ``>=3.10.0``
 ``jmespath``                               ``>=0.7.0``
 ``sagemaker-studio``                       ``>=1.0.9``
+``pydynamodb``                             ``>=0.7.5; python_version >= "3.13"``
+``sqlean.py``                              ``>=3.47.0; python_version >= "3.13"``
 ``marshmallow``                            ``>=3``
-==========================================
+========================================== ======================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -210,5 +216,5 @@ Extra Dependencies
 ==================== ========================================================================================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.18.0/changelog.html>`_.
 