apache-airflow-providers-amazon 9.12.0rc1__py3-none-any.whl → 9.13.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/auth_manager/avp/entities.py +5 -1
- airflow/providers/amazon/aws/auth_manager/avp/facade.py +4 -2
- airflow/providers/amazon/aws/auth_manager/avp/schema.json +75 -0
- airflow/providers/amazon/aws/hooks/base_aws.py +13 -17
- airflow/providers/amazon/aws/hooks/ecr.py +5 -2
- airflow/providers/amazon/aws/hooks/eks.py +69 -48
- airflow/providers/amazon/aws/hooks/ssm.py +5 -2
- airflow/providers/amazon/aws/operators/bedrock.py +9 -2
- airflow/providers/amazon/aws/operators/eks.py +22 -8
- airflow/providers/amazon/aws/queues/sqs.py +29 -1
- airflow/providers/amazon/aws/sensors/mwaa.py +0 -2
- airflow/providers/amazon/aws/transfers/gcs_to_s3.py +12 -0
- airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +9 -3
- airflow/providers/amazon/aws/transfers/sql_to_s3.py +92 -27
- airflow/providers/amazon/aws/utils/eks_get_token.py +48 -10
- {apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/METADATA +8 -9
- {apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/RECORD +20 -20
- {apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/entry_points.txt +0 -0

airflow/providers/amazon/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.
+__version__ = "9.13.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"

airflow/providers/amazon/aws/auth_manager/avp/entities.py

@@ -57,7 +57,7 @@ def get_entity_type(resource_type: AvpEntities) -> str:
     return AVP_PREFIX_ENTITIES + resource_type.value
 
 
-def get_action_id(resource_type: AvpEntities, method: ResourceMethod | str):
+def get_action_id(resource_type: AvpEntities, method: ResourceMethod | str, entity_id: str | None):
     """
     Return action id.
 
@@ -65,5 +65,9 @@ def get_action_id(resource_type: AvpEntities, method: ResourceMethod | str):
 
     :param resource_type: Resource type.
     :param method: Resource method.
+    :param entity_id: The entity ID.
     """
+    if method == "GET" and not entity_id:
+        method = "LIST"
+
     return f"{resource_type.value}.{method}"
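
In effect, together with the new ``*.LIST`` actions added to schema.json below, a ``GET`` check that carries no entity ID is now authorized against a collection-level ``LIST`` action. A minimal behavioral sketch, assuming ``AvpEntities.CONNECTION`` maps to the ``Connection`` entity type (the enum member and IDs used here are illustrative, not taken from the diff):

    from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities, get_action_id

    # GET without an entity ID is rewritten to the collection-level LIST action
    assert get_action_id(AvpEntities.CONNECTION, "GET", None) == "Connection.LIST"
    # GET with a concrete entity ID keeps the item-level action
    assert get_action_id(AvpEntities.CONNECTION, "GET", "my_conn_id") == "Connection.GET"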

airflow/providers/amazon/aws/auth_manager/avp/facade.py

@@ -120,7 +120,7 @@ class AwsAuthManagerAmazonVerifiedPermissionsFacade(LoggingMixin):
             "principal": {"entityType": get_entity_type(AvpEntities.USER), "entityId": user.get_id()},
             "action": {
                 "actionType": get_entity_type(AvpEntities.ACTION),
-                "actionId": get_action_id(entity_type, method),
+                "actionId": get_action_id(entity_type, method, entity_id),
             },
             "resource": {"entityType": get_entity_type(entity_type), "entityId": entity_id or "*"},
             "entities": {"entityList": entity_list},
@@ -281,7 +281,9 @@ class AwsAuthManagerAmazonVerifiedPermissionsFacade(LoggingMixin):
                 "principal": {"entityType": get_entity_type(AvpEntities.USER), "entityId": user.get_id()},
                 "action": {
                     "actionType": get_entity_type(AvpEntities.ACTION),
-                    "actionId": get_action_id(
+                    "actionId": get_action_id(
+                        request["entity_type"], request["method"], request.get("entity_id")
+                    ),
                 },
                 "resource": {
                     "entityType": get_entity_type(request["entity_type"]),

airflow/providers/amazon/aws/auth_manager/avp/schema.json

@@ -7,18 +7,48 @@
                "resourceTypes": ["Asset"]
            }
        },
+       "Asset.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Asset"]
+           }
+       },
+       "Asset.POST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Asset"]
+           }
+       },
+       "Asset.DELETE": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Asset"]
+           }
+       },
        "AssetAlias.GET": {
            "appliesTo": {
                "principalTypes": ["User"],
                "resourceTypes": ["AssetAlias"]
            }
        },
+       "AssetAlias.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["AssetAlias"]
+           }
+       },
        "Backfill.GET": {
            "appliesTo": {
                "principalTypes": ["User"],
                "resourceTypes": ["Backfill"]
            }
        },
+       "Backfill.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Backfill"]
+           }
+       },
        "Backfill.POST": {
            "appliesTo": {
                "principalTypes": ["User"],
@@ -43,6 +73,12 @@
                "resourceTypes": ["Connection"]
            }
        },
+       "Connection.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Connection"]
+           }
+       },
        "Connection.POST": {
            "appliesTo": {
                "principalTypes": ["User"],
@@ -67,6 +103,12 @@
                "resourceTypes": ["Custom"]
            }
        },
+       "Custom.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Custom"]
+           }
+       },
        "Custom.POST": {
            "appliesTo": {
                "principalTypes": ["User"],
@@ -85,6 +127,12 @@
                "resourceTypes": ["Configuration"]
            }
        },
+       "Configuration.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Configuration"]
+           }
+       },
        "Dag.DELETE": {
            "appliesTo": {
                "principalTypes": ["User"],
@@ -115,6 +163,21 @@
                }
            }
        },
+       "Dag.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Dag"],
+               "context": {
+                   "attributes": {
+                       "dag_entity": {
+                           "required": false,
+                           "type": "String"
+                       }
+                   },
+                   "type": "Record"
+               }
+           }
+       },
        "Dag.POST": {
            "appliesTo": {
                "principalTypes": ["User"],
@@ -163,6 +226,12 @@
                "resourceTypes": ["Pool"]
            }
        },
+       "Pool.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Pool"]
+           }
+       },
        "Pool.POST": {
            "appliesTo": {
                "principalTypes": ["User"],
@@ -187,6 +256,12 @@
                "resourceTypes": ["Variable"]
            }
        },
+       "Variable.LIST": {
+           "appliesTo": {
+               "principalTypes": ["User"],
+               "resourceTypes": ["Variable"]
+           }
+       },
        "Variable.POST": {
            "appliesTo": {
                "principalTypes": ["User"],

airflow/providers/amazon/aws/hooks/base_aws.py

@@ -61,7 +61,6 @@ from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionW
 from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
 from airflow.providers.amazon.version_compat import BaseHook
-from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers_manager import ProvidersManager
 from airflow.utils.helpers import exactly_one
 from airflow.utils.log.logging_mixin import LoggingMixin
@@ -77,9 +76,6 @@ from airflow.utils.log.logging_mixin import LoggingMixin
 BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, ServiceResource]) # noqa: UP007
 
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.exceptions import AirflowRuntimeError, ErrorType
-
 if TYPE_CHECKING:
     from aiobotocore.session import AioSession
     from botocore.client import ClientMeta
@@ -89,9 +85,12 @@ if TYPE_CHECKING:
     from airflow.sdk.execution_time.secrets_masker import mask_secret
 else:
     try:
-        from airflow.sdk.
+        from airflow.sdk.log import mask_secret
     except ImportError:
-
+        try:
+            from airflow.sdk.execution_time.secrets_masker import mask_secret
+        except ImportError:
+            from airflow.utils.log.secrets_masker import mask_secret
 
 _loader = botocore.loaders.Loader()
 """
@@ -621,20 +620,17 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
         if self.aws_conn_id:
             try:
                 connection = self.get_connection(self.aws_conn_id)
-            except
-
-
-                    AIRFLOW_V_3_0_PLUS
-                    and isinstance(e, AirflowRuntimeError)
-                    and e.error.error == ErrorType.CONNECTION_NOT_FOUND
+            except AirflowNotFoundException:
+                self.log.warning(
+                    "Unable to find AWS Connection ID '%s', switching to empty.", self.aws_conn_id
                 )
-
-
-
-
+            # In the TaskSDK's BaseHook, it only retrieves the connection via task-sdk. Since the AWS system testing infrastructure
+            # doesn't use task-sdk, this leads to an error which we handle below.
+            except ImportError as e:
+                if "SUPERVISOR_COMMS" in str(e):
+                    self.log.exception(e)
                 else:
                     raise
-
         return AwsConnectionWrapper(
             conn=connection,
             region_name=self._region_name,

airflow/providers/amazon/aws/hooks/ecr.py

@@ -30,9 +30,12 @@ if TYPE_CHECKING:
     from airflow.sdk.execution_time.secrets_masker import mask_secret
 else:
     try:
-        from airflow.sdk.
+        from airflow.sdk.log import mask_secret
     except ImportError:
-
+        try:
+            from airflow.sdk.execution_time.secrets_masker import mask_secret
+        except ImportError:
+            from airflow.utils.log.secrets_masker import mask_secret
 
 logger = logging.getLogger(__name__)
 

airflow/providers/amazon/aws/hooks/eks.py

@@ -18,9 +18,10 @@
 
 from __future__ import annotations
 
-import
+import contextlib
 import json
 import os
+import stat
 import sys
 import tempfile
 from collections.abc import Callable, Generator
@@ -29,14 +30,12 @@ from enum import Enum
 from functools import partial
 
 from botocore.exceptions import ClientError
-from botocore.signers import RequestSigner
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.sts import StsHook
 from airflow.utils import yaml
 
 DEFAULT_PAGINATION_TOKEN = ""
-STS_TOKEN_EXPIRES_IN = 60
 AUTHENTICATION_API_VERSION = "client.authentication.k8s.io/v1alpha1"
 _POD_USERNAME = "aws"
 _CONTEXT_NAME = "aws"
@@ -79,11 +78,18 @@ class NodegroupStates(Enum):
 
 COMMAND = """
             export PYTHON_OPERATORS_VIRTUAL_ENV_MODE=1
+
+            # Source credentials from secure file
+            source {credentials_file}
+
             output=$({python_executable} -m airflow.providers.amazon.aws.utils.eks_get_token \
-                --cluster-name {eks_cluster_name} {args} 2>&1)
+                --cluster-name {eks_cluster_name} --sts-url '{sts_url}' {args} 2>&1)
 
             status=$?
 
+            # Clear environment variables after use (defense in depth)
+            unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
+
             if [ "$status" -ne 0 ]; then
                 printf '%s' "$output" >&2
                 exit "$status"
@@ -537,11 +543,60 @@ class EksHook(AwsBaseHook):
 
         return name_collection
 
+    @contextlib.contextmanager
+    def _secure_credential_context(
+        self, access_key: str, secret_key: str, session_token: str | None
+    ) -> Generator[str, None, None]:
+        """
+        Context manager for secure temporary credential file.
+
+        Creates a temporary file with restrictive permissions (0600) containing AWS credentials.
+        The file is automatically cleaned up when the context manager exits.
+
+        :param access_key: AWS access key ID
+        :param secret_key: AWS secret access key
+        :param session_token: AWS session token (optional)
+        :return: Path to the temporary credential file
+        """
+        fd = None
+        temp_path = None
+
+        try:
+            # Create secure temporary file
+            fd, temp_path = tempfile.mkstemp(
+                suffix=".aws_creds",
+                prefix="airflow_eks_",
+            )
+
+            # Set restrictive permissions (0600) - owner read/write only
+            os.fchmod(fd, stat.S_IRUSR | stat.S_IWUSR)
+
+            # Write credentials to secure file
+            with os.fdopen(fd, "w") as f:
+                f.write(f"export AWS_ACCESS_KEY_ID='{access_key}'\n")
+                f.write(f"export AWS_SECRET_ACCESS_KEY='{secret_key}'\n")
+                if session_token:
+                    f.write(f"export AWS_SESSION_TOKEN='{session_token}'\n")
+
+            fd = None  # File handle closed by fdopen
+            yield temp_path
+
+        finally:
+            # Cleanup
+            if fd is not None:
+                os.close(fd)
+            if temp_path and os.path.exists(temp_path):
+                try:
+                    os.unlink(temp_path)
+                except OSError:
+                    pass  # Best effort cleanup
+
     @contextmanager
     def generate_config_file(
         self,
         eks_cluster_name: str,
         pod_namespace: str | None,
+        credentials_file,
     ) -> Generator[str, None, None]:
         """
         Write the kubeconfig file given an EKS Cluster.
@@ -553,20 +608,24 @@
         if self.region_name is not None:
             args = args + f" --region-name {self.region_name}"
 
-        if self.aws_conn_id is not None:
-            args = args + f" --aws-conn-id {self.aws_conn_id}"
-
         # We need to determine which python executable the host is running in order to correctly
         # call the eks_get_token.py script.
         python_executable = f"python{sys.version_info[0]}.{sys.version_info[1]}"
         # Set up the client
         eks_client = self.conn
+        session = self.get_session()
 
         # Get cluster details
         cluster = eks_client.describe_cluster(name=eks_cluster_name)
        cluster_cert = cluster["cluster"]["certificateAuthority"]["data"]
         cluster_ep = cluster["cluster"]["endpoint"]
 
+        os.environ["AWS_STS_REGIONAL_ENDPOINTS"] = "regional"
+        try:
+            sts_url = f"{StsHook(region_name=session.region_name).conn_client_meta.endpoint_url}/?Action=GetCallerIdentity&Version=2011-06-15"
+        finally:
+            del os.environ["AWS_STS_REGIONAL_ENDPOINTS"]
+
         cluster_config = {
             "apiVersion": "v1",
             "kind": "Config",
@@ -598,6 +657,8 @@
                         "args": [
                             "-c",
                             COMMAND.format(
+                                credentials_file=credentials_file,
+                                sts_url=sts_url,
                                 python_executable=python_executable,
                                 eks_cluster_name=eks_cluster_name,
                                 args=args,
@@ -609,50 +670,10 @@
                 }
             ],
         }
+
         config_text = yaml.dump(cluster_config, default_flow_style=False)
 
         with tempfile.NamedTemporaryFile(mode="w") as config_file:
             config_file.write(config_text)
             config_file.flush()
             yield config_file.name
-
-    def fetch_access_token_for_cluster(self, eks_cluster_name: str) -> str:
-        session = self.get_session()
-        service_id = self.conn.meta.service_model.service_id
-        # This env variable is required so that we get a regionalized endpoint for STS in regions that
-        # otherwise default to global endpoints. The mechanism below to generate the token is very picky that
-        # the endpoint is regional.
-        os.environ["AWS_STS_REGIONAL_ENDPOINTS"] = "regional"
-        try:
-            sts_url = f"{StsHook(region_name=session.region_name).conn_client_meta.endpoint_url}/?Action=GetCallerIdentity&Version=2011-06-15"
-        finally:
-            del os.environ["AWS_STS_REGIONAL_ENDPOINTS"]
-
-        signer = RequestSigner(
-            service_id=service_id,
-            region_name=session.region_name,
-            signing_name="sts",
-            signature_version="v4",
-            credentials=session.get_credentials(),
-            event_emitter=session.events,
-        )
-
-        request_params = {
-            "method": "GET",
-            "url": sts_url,
-            "body": {},
-            "headers": {"x-k8s-aws-id": eks_cluster_name},
-            "context": {},
-        }
-
-        signed_url = signer.generate_presigned_url(
-            request_dict=request_params,
-            region_name=session.region_name,
-            expires_in=STS_TOKEN_EXPIRES_IN,
-            operation_name="",
-        )
-
-        base64_url = base64.urlsafe_b64encode(signed_url.encode("utf-8")).decode("utf-8")
-
-        # remove any base64 encoding padding:
-        return "k8s-aws-v1." + base64_url.rstrip("=")

airflow/providers/amazon/aws/hooks/ssm.py

@@ -26,9 +26,12 @@ if TYPE_CHECKING:
     from airflow.sdk.execution_time.secrets_masker import mask_secret
 else:
     try:
-        from airflow.sdk.
+        from airflow.sdk.log import mask_secret
     except ImportError:
-
+        try:
+            from airflow.sdk.execution_time.secrets_masker import mask_secret
+        except ImportError:
+            from airflow.utils.log.secrets_masker import mask_secret
 
 
 class SsmHook(AwsBaseHook):

airflow/providers/amazon/aws/operators/bedrock.py

@@ -476,10 +476,17 @@ class BedrockCreateKnowledgeBaseOperator(AwsBaseOperator[BedrockAgentHook]):
                     **self.create_knowledge_base_kwargs,
                 )["knowledgeBase"]["knowledgeBaseId"]
             except ClientError as error:
+                error_message = error.response["Error"]["Message"].lower()
+                is_known_retryable_message = (
+                    "no such index" in error_message
+                    # It may also be that permissions haven't even propagated yet to check for the index
+                    or "server returned 401" in error_message
+                    or "user does not have permissions" in error_message
+                )
                 if all(
                     [
                         error.response["Error"]["Code"] == "ValidationException",
-
+                        is_known_retryable_message,
                         self.wait_for_indexing,
                         self.indexing_error_max_attempts > 0,
                     ]
@@ -488,7 +495,7 @@ class BedrockCreateKnowledgeBaseOperator(AwsBaseOperator[BedrockAgentHook]):
                     self.log.warning(
                         "Vector index not ready, retrying in %s seconds.", self.indexing_error_retry_delay
                     )
-                    self.log.
+                    self.log.info("%s retries remaining.", self.indexing_error_max_attempts)
                    sleep(self.indexing_error_retry_delay)
                     return _create_kb()
                 raise

airflow/providers/amazon/aws/operators/eks.py

@@ -1069,10 +1069,17 @@ class EksPodOperator(KubernetesPodOperator):
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
         )
-
-
-
-
+        session = eks_hook.get_session()
+        credentials = session.get_credentials().get_frozen_credentials()
+        with eks_hook._secure_credential_context(
+            credentials.access_key, credentials.secret_key, credentials.token
+        ) as credentials_file:
+            with eks_hook.generate_config_file(
+                eks_cluster_name=self.cluster_name,
+                pod_namespace=self.namespace,
+                credentials_file=credentials_file,
+            ) as self.config_file:
+                return super().execute(context)
 
     def trigger_reentry(self, context: Context, event: dict[str, Any]) -> Any:
         eks_hook = EksHook(
@@ -1081,7 +1088,14 @@ class EksPodOperator(KubernetesPodOperator):
         )
         eks_cluster_name = event["eks_cluster_name"]
         pod_namespace = event["namespace"]
-
-
-
-
+        session = eks_hook.get_session()
+        credentials = session.get_credentials().get_frozen_credentials()
+        with eks_hook._secure_credential_context(
+            credentials.access_key, credentials.secret_key, credentials.token
+        ) as credentials_file:
+            with eks_hook.generate_config_file(
+                eks_cluster_name=eks_cluster_name,
+                pod_namespace=pod_namespace,
+                credentials_file=credentials_file,
+            ) as self.config_file:
+                return super().trigger_reentry(context, event)

airflow/providers/amazon/aws/queues/sqs.py

@@ -38,7 +38,35 @@ QUEUE_REGEXP = r"^https://sqs\.[^.]+\.amazonaws\.com/[0-9]+/.+"
 
 
 class SqsMessageQueueProvider(BaseMessageQueueProvider):
-    """
+    """
+    Configuration for SQS integration with common-messaging.
+
+    [START sqs_message_queue_provider_description]
+
+    * It uses ``sqs`` as scheme for identifying SQS queues.
+    * For parameter definitions take a look at :class:`~airflow.providers.amazon.aws.triggers.sqs.SqsSensorTrigger`.
+
+    .. code-block:: python
+
+        from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
+        from airflow.sdk import Asset, AssetWatcher
+
+        trigger = MessageQueueTrigger(
+            scheme="sqs",
+            # Additional AWS SqsSensorTrigger parameters as needed
+            sqs_queue="https://sqs.us-east-1.amazonaws.com/123456789012/my-queue",
+            aws_conn_id="aws_default",
+        )
+
+        asset = Asset("sqs_queue_asset", watchers=[AssetWatcher(name="sqs_watcher", trigger=trigger)])
+
+    For a complete example, see:
+    :mod:`tests.system.amazon.aws.example_dag_sqs_message_queue_trigger`
+
+    [END sqs_message_queue_provider_description]
+    """
+
+    scheme = "sqs"
 
     def queue_matches(self, queue: str) -> bool:
         return bool(re.match(QUEUE_REGEXP, queue))

airflow/providers/amazon/aws/sensors/mwaa.py

@@ -152,7 +152,6 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
                 waiter_delay=int(self.poke_interval),
                 waiter_max_attempts=self.max_retries,
                 aws_conn_id=self.aws_conn_id,
-                end_from_trigger=True,
             ),
             method_name="execute_complete",
         )
@@ -294,7 +293,6 @@ class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
                 waiter_delay=int(self.poke_interval),
                 waiter_max_attempts=self.max_retries,
                 aws_conn_id=self.aws_conn_id,
-                end_from_trigger=True,
             ),
             method_name="execute_complete",
         )

airflow/providers/amazon/aws/transfers/gcs_to_s3.py

@@ -31,6 +31,7 @@ from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 
 if TYPE_CHECKING:
+    from airflow.providers.openlineage.extractors import OperatorLineage
     from airflow.utils.context import Context
 
 
@@ -206,3 +207,14 @@ class GCSToS3Operator(BaseOperator):
             self.log.info("In sync, no files needed to be uploaded to S3")
 
         return gcs_files
+
+    def get_openlineage_facets_on_start(self) -> OperatorLineage:
+        from airflow.providers.common.compat.openlineage.facet import Dataset
+        from airflow.providers.openlineage.extractors import OperatorLineage
+
+        bucket_name, s3_key = S3Hook.parse_s3_url(self.dest_s3_key)
+
+        return OperatorLineage(
+            inputs=[Dataset(namespace=f"gs://{self.gcs_bucket}", name=self.prefix or "/")],
+            outputs=[Dataset(namespace=f"s3://{bucket_name}", name=s3_key or "/")],
+        )

airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py

@@ -21,7 +21,7 @@ from __future__ import annotations
 
 import json
 from collections.abc import Callable, Sequence
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Literal
 
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.amazon.version_compat import BaseOperator
@@ -53,6 +53,7 @@ class HiveToDynamoDBOperator(BaseOperator):
     :param hiveserver2_conn_id: Reference to the
         :ref: `Hive Server2 thrift service connection id <howto/connection:hiveserver2>`.
     :param aws_conn_id: aws connection
+    :param df_type: DataFrame type to use ("pandas" or "polars").
     """
 
     template_fields: Sequence[str] = ("sql",)
@@ -73,6 +74,7 @@ class HiveToDynamoDBOperator(BaseOperator):
         schema: str = "default",
         hiveserver2_conn_id: str = "hiveserver2_default",
         aws_conn_id: str | None = "aws_default",
+        df_type: Literal["pandas", "polars"] = "pandas",
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -86,6 +88,7 @@ class HiveToDynamoDBOperator(BaseOperator):
         self.schema = schema
         self.hiveserver2_conn_id = hiveserver2_conn_id
         self.aws_conn_id = aws_conn_id
+        self.df_type = df_type
 
     def execute(self, context: Context):
         hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id)
@@ -93,7 +96,7 @@ class HiveToDynamoDBOperator(BaseOperator):
         self.log.info("Extracting data from Hive")
         self.log.info(self.sql)
 
-        data = hive.get_df(self.sql, schema=self.schema, df_type=
+        data = hive.get_df(self.sql, schema=self.schema, df_type=self.df_type)
         dynamodb = DynamoDBHook(
             aws_conn_id=self.aws_conn_id,
             table_name=self.table_name,
@@ -104,7 +107,10 @@ class HiveToDynamoDBOperator(BaseOperator):
         self.log.info("Inserting rows into dynamodb")
 
         if self.pre_process is None:
-
+            if self.df_type == "polars":
+                dynamodb.write_batch_data(data.to_dicts()) # type:ignore[operator]
+            elif self.df_type == "pandas":
+                dynamodb.write_batch_data(json.loads(data.to_json(orient="records"))) # type:ignore[union-attr]
         else:
             dynamodb.write_batch_data(
                 self.pre_process(data=data, args=self.pre_process_args, kwargs=self.pre_process_kwargs)
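
For context, a hedged usage sketch of the new ``df_type`` argument; the task ID, SQL, and table names below are illustrative, and the remaining arguments follow the operator's existing signature:

    from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator

    hive_to_ddb = HiveToDynamoDBOperator(
        task_id="hive_to_dynamodb",
        sql="SELECT id, name FROM my_hive_table",
        table_name="my_dynamodb_table",
        table_keys=["id"],
        df_type="polars",  # new in 9.13.0; defaults to "pandas"
    )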

airflow/providers/amazon/aws/transfers/sql_to_s3.py

@@ -20,16 +20,18 @@ from __future__ import annotations
 import enum
 import gzip
 import io
+import warnings
 from collections import namedtuple
 from collections.abc import Iterable, Mapping, Sequence
 from typing import TYPE_CHECKING, Any, Literal, cast
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
 
 if TYPE_CHECKING:
     import pandas as pd
+    import polars as pl
 
     from airflow.providers.common.sql.hooks.sql import DbApiHook
     from airflow.utils.context import Context
@@ -69,7 +71,8 @@ class SqlToS3Operator(BaseOperator):
     :param sql_hook_params: Extra config params to be passed to the underlying hook.
         Should match the desired hook constructor params.
     :param parameters: (optional) the parameters to render the SQL query with.
-    :param
+    :param read_kwargs: arguments to include in DataFrame when reading from SQL (supports both pandas and polars).
+    :param df_type: the type of DataFrame to use ('pandas' or 'polars'). Defaults to 'pandas'.
     :param aws_conn_id: reference to a specific S3 connection
     :param verify: Whether or not to verify SSL certificates for S3 connection.
         By default SSL certificates are verified.
@@ -84,7 +87,7 @@
     :param max_rows_per_file: (optional) argument to set destination file number of rows limit, if source data
         is larger than that, it will be dispatched into multiple files.
         Will be ignored if ``groupby_kwargs`` argument is specified.
-    :param
+    :param df_kwargs: arguments to include in DataFrame ``.to_parquet()``, ``.to_json()`` or ``.to_csv()``.
     :param groupby_kwargs: argument to include in DataFrame ``groupby()``.
     """
 
@@ -97,8 +100,9 @@
     template_ext: Sequence[str] = (".sql",)
     template_fields_renderers = {
         "query": "sql",
+        "df_kwargs": "json",
         "pd_kwargs": "json",
-        "
+        "read_kwargs": "json",
     }
 
     def __init__(
@@ -110,12 +114,15 @@
         sql_conn_id: str,
         sql_hook_params: dict | None = None,
         parameters: None | Mapping[str, Any] | list | tuple = None,
+        read_kwargs: dict | None = None,
         read_pd_kwargs: dict | None = None,
+        df_type: Literal["pandas", "polars"] = "pandas",
         replace: bool = False,
         aws_conn_id: str | None = "aws_default",
         verify: bool | str | None = None,
         file_format: Literal["csv", "json", "parquet"] = "csv",
         max_rows_per_file: int = 0,
+        df_kwargs: dict | None = None,
         pd_kwargs: dict | None = None,
         groupby_kwargs: dict | None = None,
         **kwargs,
@@ -128,14 +135,30 @@
         self.aws_conn_id = aws_conn_id
         self.verify = verify
         self.replace = replace
-        self.pd_kwargs = pd_kwargs or {}
         self.parameters = parameters
-        self.read_pd_kwargs = read_pd_kwargs or {}
         self.max_rows_per_file = max_rows_per_file
         self.groupby_kwargs = groupby_kwargs or {}
         self.sql_hook_params = sql_hook_params
+        self.df_type = df_type
 
-        if
+        if read_pd_kwargs is not None:
+            warnings.warn(
+                "The 'read_pd_kwargs' parameter is deprecated. Use 'read_kwargs' instead.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+        self.read_kwargs = read_kwargs if read_kwargs is not None else read_pd_kwargs or {}
+
+        if pd_kwargs is not None:
+            warnings.warn(
+                "The 'pd_kwargs' parameter is deprecated. Use 'df_kwargs' instead.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+
+        self.df_kwargs = df_kwargs if df_kwargs is not None else pd_kwargs or {}
+
+        if "path_or_buf" in self.df_kwargs:
             raise AirflowException("The argument path_or_buf is not allowed, please remove it")
 
         if self.max_rows_per_file and self.groupby_kwargs:
@@ -186,33 +209,51 @@
             df[col] = np.where(df[col].isnull(), None, df[col]) # type: ignore[call-overload]
             df[col] = df[col].astype(pd.Float64Dtype())
 
+    @staticmethod
+    def _strip_suffixes(
+        path: str,
+    ) -> str:
+        suffixes = [".json.gz", ".csv.gz", ".json", ".csv", ".parquet"]
+        for suffix in sorted(suffixes, key=len, reverse=True):
+            if path.endswith(suffix):
+                return path[: -len(suffix)]
+        return path
+
     def execute(self, context: Context) -> None:
         sql_hook = self._get_hook()
         s3_conn = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         data_df = sql_hook.get_df(
-            sql=self.query, parameters=self.parameters, df_type=
+            sql=self.query, parameters=self.parameters, df_type=self.df_type, **self.read_kwargs
         )
         self.log.info("Data from SQL obtained")
-
-
+        # Only apply dtype fixes to pandas DataFrames since Polars doesn't have the same NaN/None inconsistencies as pandas
+        if ("dtype_backend", "pyarrow") not in self.read_kwargs.items() and self.df_type == "pandas":
+            self._fix_dtypes(data_df, self.file_format)  # type: ignore[arg-type]
         file_options = FILE_OPTIONS_MAP[self.file_format]
 
         for group_name, df in self._partition_dataframe(df=data_df):
             buf = io.BytesIO()
             self.log.info("Writing data to in-memory buffer")
-
+            clean_key = self._strip_suffixes(self.s3_key)
+            object_key = (
+                f"{clean_key}_{group_name}{file_options.suffix}"
+                if group_name
+                else f"{clean_key}{file_options.suffix}"
+            )
 
-            if self.
-
+            if self.file_format != FILE_FORMAT.PARQUET and self.df_kwargs.get("compression") == "gzip":
+                object_key += ".gz"
+                df_kwargs = {k: v for k, v in self.df_kwargs.items() if k != "compression"}
                 with gzip.GzipFile(fileobj=buf, mode="wb", filename=object_key) as gz:
-                    getattr(df, file_options.function)(gz, **
+                    getattr(df, file_options.function)(gz, **df_kwargs)
             else:
                 if self.file_format == FILE_FORMAT.PARQUET:
-                    getattr(df, file_options.function)(buf, **self.
+                    getattr(df, file_options.function)(buf, **self.df_kwargs)
                 else:
                     text_buf = io.TextIOWrapper(buf, encoding="utf-8", write_through=True)
-                    getattr(df, file_options.function)(text_buf, **self.
+                    getattr(df, file_options.function)(text_buf, **self.df_kwargs)
                     text_buf.flush()
+
             buf.seek(0)
 
             self.log.info("Uploading data to S3")
@@ -220,32 +261,56 @@
                 file_obj=buf, key=object_key, bucket_name=self.s3_bucket, replace=self.replace
             )
 
-    def _partition_dataframe(
-
+    def _partition_dataframe(
+        self, df: pd.DataFrame | pl.DataFrame
+    ) -> Iterable[tuple[str, pd.DataFrame | pl.DataFrame]]:
+        """Partition dataframe using pandas or polars groupby() method."""
         try:
             import secrets
             import string
 
             import numpy as np
+            import pandas as pd
+            import polars as pl
         except ImportError:
             pass
+
         # if max_rows_per_file argument is specified, a temporary column with a random unusual name will be
         # added to the dataframe. This column is used to dispatch the dataframe into smaller ones using groupby()
-        random_column_name =
+        random_column_name = None
        if self.max_rows_per_file and not self.groupby_kwargs:
             random_column_name = "".join(secrets.choice(string.ascii_letters) for _ in range(20))
-            df[random_column_name] = np.arange(len(df)) // self.max_rows_per_file
             self.groupby_kwargs = {"by": random_column_name}
+
+        if random_column_name:
+            if isinstance(df, pd.DataFrame):
+                df[random_column_name] = np.arange(len(df)) // self.max_rows_per_file
+            elif isinstance(df, pl.DataFrame):
+                df = df.with_columns(
+                    (pl.int_range(pl.len()) // self.max_rows_per_file).alias(random_column_name)
+                )
+
         if not self.groupby_kwargs:
             yield "", df
             return
-
-
-
-            grouped_df.get_group(group_label)
-
-
-
+
+        if isinstance(df, pd.DataFrame):
+            for group_label in (grouped_df := df.groupby(**self.groupby_kwargs)).groups:
+                group_df = grouped_df.get_group(group_label)
+                if random_column_name:
+                    group_df = group_df.drop(random_column_name, axis=1, errors="ignore")
+                yield (
+                    cast("str", group_label[0] if isinstance(group_label, tuple) else group_label),
+                    group_df.reset_index(drop=True),
+                )
+        elif isinstance(df, pl.DataFrame):
+            for group_label, group_df in df.group_by(**self.groupby_kwargs):  # type: ignore[assignment]
+                if random_column_name:
+                    group_df = group_df.drop(random_column_name)
+                yield (
+                    cast("str", group_label[0] if isinstance(group_label, tuple) else group_label),
+                    group_df,
+                )
 
     def _get_hook(self) -> DbApiHook:
         self.log.debug("Get connection for %s", self.sql_conn_id)
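
A hedged migration sketch for the renamed SqlToS3Operator arguments (connection IDs, bucket, and key are illustrative); the old ``pd_kwargs``/``read_pd_kwargs`` names still work but now emit ``AirflowProviderDeprecationWarning``:

    from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator

    # Before: pandas-specific argument name (still accepted, but deprecated)
    legacy = SqlToS3Operator(
        task_id="export_orders_legacy",
        query="SELECT * FROM orders",
        sql_conn_id="my_postgres",
        s3_bucket="my-bucket",
        s3_key="exports/orders.csv",
        pd_kwargs={"index": False},
    )

    # After: backend-agnostic names; df_type="polars" is also accepted (defaults to "pandas")
    current = SqlToS3Operator(
        task_id="export_orders",
        query="SELECT * FROM orders",
        sql_conn_id="my_postgres",
        s3_bucket="my-bucket",
        s3_key="exports/orders.csv",
        df_kwargs={"index": False},
    )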

airflow/providers/amazon/aws/utils/eks_get_token.py

@@ -17,12 +17,16 @@
 from __future__ import annotations
 
 import argparse
+import base64
+import os
 from datetime import datetime, timedelta, timezone
 
-
+import boto3
+from botocore.signers import RequestSigner
 
 # Presigned STS urls are valid for 15 minutes, set token expiration to 1 minute before it expires for
 # some cushion
+STS_TOKEN_EXPIRES_IN = 60
 TOKEN_EXPIRATION_MINUTES = 14
 
 
@@ -37,25 +41,59 @@ def get_parser():
     parser.add_argument(
         "--cluster-name", help="The name of the cluster to generate kubeconfig file for.", required=True
     )
-    parser.add_argument(
-        "--aws-conn-id",
-        help=(
-            "The Airflow connection used for AWS credentials. "
-            "If not specified or empty then the default boto3 behaviour is used."
-        ),
-    )
     parser.add_argument(
         "--region-name", help="AWS region_name. If not specified then the default boto3 behaviour is used."
     )
+    parser.add_argument("--sts-url", help="Provide the STS url", required=True)
 
     return parser
 
 
+def fetch_access_token_for_cluster(eks_cluster_name: str, sts_url: str, region_name: str) -> str:
+    # This will use the credentials from the caller set as the standard AWS env variables
+    session = boto3.Session(region_name=region_name)
+    eks_client = session.client("eks")
+    # This env variable is required so that we get a regionalized endpoint for STS in regions that
+    # otherwise default to global endpoints. The mechanism below to generate the token is very picky that
+    # the endpoint is regional.
+    os.environ["AWS_STS_REGIONAL_ENDPOINTS"] = "regional"
+
+    signer = RequestSigner(
+        service_id=eks_client.meta.service_model.service_id,
+        region_name=session.region_name,
+        signing_name="sts",
+        signature_version="v4",
+        credentials=session.get_credentials(),
+        event_emitter=session.events,
+    )
+
+    request_params = {
+        "method": "GET",
+        "url": sts_url,
+        "body": {},
+        "headers": {"x-k8s-aws-id": eks_cluster_name},
+        "context": {},
+    }
+
+    signed_url = signer.generate_presigned_url(
+        request_dict=request_params,
+        region_name=session.region_name,
+        expires_in=STS_TOKEN_EXPIRES_IN,
+        operation_name="",
+    )
+
+    base64_url = base64.urlsafe_b64encode(signed_url.encode("utf-8")).decode("utf-8")
+
+    # remove any base64 encoding padding:
+    return "k8s-aws-v1." + base64_url.rstrip("=")
+
+
 def main():
     parser = get_parser()
     args = parser.parse_args()
-
-
+    access_token = fetch_access_token_for_cluster(
+        args.cluster_name, args.sts_url, region_name=args.region_name
+    )
     access_token_expiration = get_expiration_time()
     print(f"expirationTimestamp: {access_token_expiration}, token: {access_token}")
 

{apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.
+Version: 9.13.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -38,7 +38,7 @@ Requires-Dist: marshmallow>=3
 Requires-Dist: aiobotocore[boto3]>=2.21.1 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc1 ; extra == "cncf-kubernetes"
-Requires-Dist: apache-airflow-providers-common-messaging>=
+Requires-Dist: apache-airflow-providers-common-messaging>=2.0.0rc1 ; extra == "common-messaging"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
 Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
@@ -55,8 +55,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.13.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.13.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -104,9 +104,8 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.
+Release: ``9.13.0``
 
-Release Date: ``|PypiReleaseDate|``
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
 
@@ -118,12 +117,12 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.13.0/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-amazon``
 
@@ -186,5 +185,5 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.13.0/changelog.html>`_.
 

{apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/RECORD

@@ -1,5 +1,5 @@
 airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/amazon/__init__.py,sha256=
+airflow/providers/amazon/__init__.py,sha256=0Ac_umsba8UdAvpcFb-u5u3Wgq5p4O2tTjtP7UTDBV0,1496
 airflow/providers/amazon/get_provider_info.py,sha256=HqgOY-2XbaX7Nhb11ySGgUIrQJ_C8tBWRx9b6XO32zg,73282
 airflow/providers/amazon/version_compat.py,sha256=8biVK8TSccWSZKPfRoA5w9N9R6YznPWPq8RALrVDWuY,2309
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -11,9 +11,9 @@ airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=8Gc7luaHkKW
 airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=zds3U6gHmwAy1MuxFFPtGTYikMj-RjYVki9-TSdfnbg,2043
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/avp/entities.py,sha256=
-airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=
-airflow/providers/amazon/aws/auth_manager/avp/schema.json,sha256
+airflow/providers/amazon/aws/auth_manager/avp/entities.py,sha256=MAAPZulvCnmRR7xvwZWriqNlV_h2v-41ZL-3qPo1Bq8,2163
+airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=7ZdYdqSoM5lSlNA8fTAFOXSzxLcA8wYEXwX7ytQi74Q,11516
+airflow/providers/amazon/aws/auth_manager/avp/schema.json,sha256=-8eT5DUdWP7y6nN-YuuN6UMNRIG8WdRxGBanF5zgNi0,9644
 airflow/providers/amazon/aws/auth_manager/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=dnKfF-PFvkaZMicHjVPxrLkQr1y003898i2AyjT1XnY,5376
 airflow/providers/amazon/aws/auth_manager/cli/definition.py,sha256=Z-WWCT38F2yn8hA4CTzf-YdzsgwOaavU-tftCySWaJs,2030
@@ -50,7 +50,7 @@ airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/amazon/aws/hooks/appflow.py,sha256=-le6RsIMWIqTav7KGknsph9Td42znSm_eIYztxc_RsE,5263
 airflow/providers/amazon/aws/hooks/athena.py,sha256=ubpgNafkIRzFh1h7Bexhj4L83SLvyZf-yDcmGmAIVt8,13463
 airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=0LzWXpmUYVphkuhnH5dUgnopmuAAvjkC0nAFq-C0Xk8,7201
-airflow/providers/amazon/aws/hooks/base_aws.py,sha256=
+airflow/providers/amazon/aws/hooks/base_aws.py,sha256=9TtvkBc46LkHHikCduFDP4MgidLGAcqP0SeJC-Jd--4,46594
 airflow/providers/amazon/aws/hooks/batch_client.py,sha256=0FGUSCtcgvuO6A8oKNykZ6T8ZZGQav4OP-F2PcrIw7o,22016
 airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
 airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=V-ZvO6cAAVAJqOHx8aU5QEzaCYnPnCC8jBWLESi9-qs,10981
@@ -62,9 +62,9 @@ airflow/providers/amazon/aws/hooks/datasync.py,sha256=RcYRlpP7jsUzTUJDOEVYnfTkm5
 airflow/providers/amazon/aws/hooks/dms.py,sha256=zR8zMuR_uR1_38meyDE9sATq8cZKvzCnFDuTlQykp7g,14398
 airflow/providers/amazon/aws/hooks/dynamodb.py,sha256=gS0KlQFvHh74DEt3gyQaGdXcb0yT6SXaxO1JFhPe1sI,3997
 airflow/providers/amazon/aws/hooks/ec2.py,sha256=M5XIGQ2LamXS2JQlHF_pMCdUWUmFmA8uDVlCcI-vTlo,8094
-airflow/providers/amazon/aws/hooks/ecr.py,sha256=
+airflow/providers/amazon/aws/hooks/ecr.py,sha256=wFJmaFN_vgYiIRYMrD7Yy3e6pjEjSf3VDgehnfl4L7c,4112
 airflow/providers/amazon/aws/hooks/ecs.py,sha256=9HMiR21Mj4PAeLMHyY87rI4WstNqRqRwJAicAGQiEHQ,6307
-airflow/providers/amazon/aws/hooks/eks.py,sha256=
+airflow/providers/amazon/aws/hooks/eks.py,sha256=WEwy9vb5v7cr0glUL54sBq2LUk9mQlS9yzO32aKPQrY,25819
 airflow/providers/amazon/aws/hooks/elasticache_replication_group.py,sha256=x6kkaR2nzDF8w1kqolbaS3-XCbHl5qlJMcpGYmlsxuU,12089
 airflow/providers/amazon/aws/hooks/emr.py,sha256=B8vNVLOFCkCEXnongySjcFH2fqnK3sBN8d6LbJrAYNA,22240
 airflow/providers/amazon/aws/hooks/eventbridge.py,sha256=dSaKbFB8ueOUJGl6YLIz70zXy0Xzr3yMflKS2wGFDSM,3364
@@ -92,7 +92,7 @@ airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6
 airflow/providers/amazon/aws/hooks/ses.py,sha256=DuCJwFhtg3D3mu0RSjRrebyUpwBAhrWSr-kdu8VR9qU,4174
 airflow/providers/amazon/aws/hooks/sns.py,sha256=SwhLeurqaV8TLhgCGsZgtf98D0_kLVLlhXer_96Anv4,4101
 airflow/providers/amazon/aws/hooks/sqs.py,sha256=306cpNAgRVuu1Dg8lrsg9o3aWsEJ9ELxxl7GhG-AbLA,3380
-airflow/providers/amazon/aws/hooks/ssm.py,sha256
+airflow/providers/amazon/aws/hooks/ssm.py,sha256=QSouBw7JshWLY9YrB7qryvE05EBoTr7qVHwQBptZ8Qo,2734
 airflow/providers/amazon/aws/hooks/step_function.py,sha256=TSmPPF-CFR76a-K9f2yGtgdgd98UKZS71SP6crC_pIY,3964
 airflow/providers/amazon/aws/hooks/sts.py,sha256=6KYyou-tOhbGhRfnGHH95TUi3ENNHkISUJf0nskmuiw,1827
 airflow/providers/amazon/aws/hooks/verified_permissions.py,sha256=-5vLcpBX_V43tY37a5PpeC60DIUAa2AXBe3sSxpqlGY,1799
@@ -121,14 +121,14 @@ airflow/providers/amazon/aws/operators/appflow.py,sha256=TlQSJc1frCw7yKfobjCLf2f
 airflow/providers/amazon/aws/operators/athena.py,sha256=CPKPZXN4dGELg0sW0LJIkG0X2-v5qPgWt4TMqYtTS18,14754
 airflow/providers/amazon/aws/operators/base_aws.py,sha256=Y8hbT2uDA2CUfaEhHqm9qc_P81OS1ZnPh_34sHn0FrA,3898
 airflow/providers/amazon/aws/operators/batch.py,sha256=a3fGEw9uQ1RvVLaDVU1obmceiCas2Sm9OOwES2EBE08,21935
-airflow/providers/amazon/aws/operators/bedrock.py,sha256=
+airflow/providers/amazon/aws/operators/bedrock.py,sha256=xPYocvopa7dP-g6E25yVymYPvT0Bbre12iYeIepgn6Q,46167
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=bDzAHcs2QjrnwE3Z9w4s6JFeu5Xl74JWKR8Y5Ez03C4,5005
 airflow/providers/amazon/aws/operators/comprehend.py,sha256=Vkdw0i2iW9_WRQLSDKNncNkVIQaNWG8jz-DxHy47Fmg,17607
 airflow/providers/amazon/aws/operators/datasync.py,sha256=7DZtLjYxQqk7kV968CFHtWk5pbQPausbvE8DAkvPhqw,20265
 airflow/providers/amazon/aws/operators/dms.py,sha256=XmIcXpkp_--PBQF1m7NFfeHDTp4aRV4lnXUu5h6kEa4,34658
 airflow/providers/amazon/aws/operators/ec2.py,sha256=SclBzOLo3GbQe3kw4S3MKf8zLm8IaKNSiGTc_U-OxRo,19700
 airflow/providers/amazon/aws/operators/ecs.py,sha256=xlkTNCNCnNZBW8ntrULLICrpsbZnVBaF6FvazOe-I1A,32793
-airflow/providers/amazon/aws/operators/eks.py,sha256=
+airflow/providers/amazon/aws/operators/eks.py,sha256=djaoaMhj3N5JzKeQVZSMlJb9XGkiGYUAQx-xVocoBC0,52086
 airflow/providers/amazon/aws/operators/emr.py,sha256=Cw1qiA0eiPJODCSxHhPayo2_0TZOlA4mj8pcveV0WNc,75983
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=NacTdvRzZZFizSzC3rb0Z7g8dHQWkKQEXGYzFKOp3fc,10421
 airflow/providers/amazon/aws/operators/glacier.py,sha256=6TFC07B0EOmtRxLs7Bok4jwV84po2yVDa-DnlbnAOVg,3681
@@ -151,7 +151,7 @@ airflow/providers/amazon/aws/operators/sqs.py,sha256=o9rH2Pm5DNmccLh5I2wr96hZiux
 airflow/providers/amazon/aws/operators/ssm.py,sha256=4WX38BTzM33iAow1MRnIxJb4MvLAfUYO-BMVf8kFtjU,5716
 airflow/providers/amazon/aws/operators/step_function.py,sha256=isee1oy4X3YnpoJInPI6sYq67E5kIYj9zhhBpHwvDJI,9582
 airflow/providers/amazon/aws/queues/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/queues/sqs.py,sha256
+airflow/providers/amazon/aws/queues/sqs.py,sha256=-CMPgA4eGF5K_ajphUzHVVcU9e4hFUWE-8GBA1x9oug,2870
 airflow/providers/amazon/aws/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/secrets/secrets_manager.py,sha256=KHN4gwGvPg9fXbkR3sK158Ys8LsGCmKFxcjyGWAgrdY,13610
 airflow/providers/amazon/aws/secrets/systems_manager.py,sha256=4o9x02hR3i9BIEHJsElLMxDFPhA7MTgt-t-3Fg3Aqtg,8496
@@ -174,7 +174,7 @@ airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=YXvkQRnu5T
 airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=ps-fG09QV_QNs8dxlmFKXNVKhvPIuvJoMJCeYi-6QBc,3375
 airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=TDsQKi5nx10CgMoTSVbYRo4m-PiKFDhyhnO7dQZSnuI,9933
 airflow/providers/amazon/aws/sensors/lambda_function.py,sha256=kO4UyrEMaMYSYbQyBN3F2eoysze_kIYCbMaF4tqiKo0,3287
-airflow/providers/amazon/aws/sensors/mwaa.py,sha256=
+airflow/providers/amazon/aws/sensors/mwaa.py,sha256=Fpj8vMQP3bKhl4Ozdp0SIhD0D8Mhrrz9jW9GnL2bOWQ,13977
 airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=cSaZvCvAC7zhFqBYNympTiQHtgCZ7srC5-TrbS4l2GQ,5508
 airflow/providers/amazon/aws/sensors/quicksight.py,sha256=lm1omzh01BKh0KHU3g2I1yH9LAXtddUDiuIS3uIeOrE,3575
 airflow/providers/amazon/aws/sensors/rds.py,sha256=HWYQOQ7n9s48Ci2WxBOtrAp17aB-at5werAljq3NDYE,7420
@@ -191,10 +191,10 @@ airflow/providers/amazon/aws/transfers/base.py,sha256=lQHNWxIbMZmWmA2oGeQ2zoOyl_
 airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py,sha256=VDcbCXMnlTWvcuN1FnxDE4_EIvVUTJZOkbn7Xy0ahXA,10171
 airflow/providers/amazon/aws/transfers/exasol_to_s3.py,sha256=k86XX7pKCLCE07i1HZ_Rd7DnysMUBEFnP8MZw4_DDv8,4470
 airflow/providers/amazon/aws/transfers/ftp_to_s3.py,sha256=nVhQCGNcAc35DlcLl99Qs1pU-DZiU8m2ZgKzFlrN4JI,6418
-airflow/providers/amazon/aws/transfers/gcs_to_s3.py,sha256=
+airflow/providers/amazon/aws/transfers/gcs_to_s3.py,sha256=TxXJQ5Ds02gyovB4mDgZFr99geayjD-ts9Zb4JZFffc,9829
 airflow/providers/amazon/aws/transfers/glacier_to_gcs.py,sha256=Qt6KE1qJbFRQYrA6WQ3MVcfnuACq2XwTGl6PxMaaMEI,4762
 airflow/providers/amazon/aws/transfers/google_api_to_s3.py,sha256=9scBbrYP2Rrkn2XyYD7uMjI8hKq9jz7hyxM7G7TAiIg,9251
-airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py,sha256=
+airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py,sha256=xVatW748OWbMYks9XicYJPIjc1wk-US8ZSfvOimK6ro,4610
 airflow/providers/amazon/aws/transfers/http_to_s3.py,sha256=PTms3EA-7sZTRVvE2H_J2CnqLGQCavnzc783urzAgSE,7563
 airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py,sha256=9CEwqrEOlStYXI_AQdmmsxL-gnUFxJ8IfA8mp0a1UX4,4588
 airflow/providers/amazon/aws/transfers/local_to_s3.py,sha256=uECaDdn-FfS0R_Jz2zu6L6FOXc-VqQFM-Tqa5ygOhMU,4217
@@ -207,7 +207,7 @@ airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=sor0_phlRN-yltJiaz0l
 airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=RLuAwBHJOMcGaZcDgrivAhLRsOuZsjwTxJEOcLB_1MY,4971
 airflow/providers/amazon/aws/transfers/salesforce_to_s3.py,sha256=noALwo6dpjEHF33ZDDZY0z47HK0Gsv-BU3Zr2NE3zRA,5738
 airflow/providers/amazon/aws/transfers/sftp_to_s3.py,sha256=o5IDLFmeHzqBH6_Uh_fGTk9iymjQYsuGznnH-qZ1M-Y,4234
-airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256
+airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=cqQd9qXLuMaSqJ2o6nck9jWMYOguTmr-cMh_JM17NI4,14164
 airflow/providers/amazon/aws/triggers/README.md,sha256=ax2F0w2CuQSDN4ghJADozrrv5W4OeCDPA8Vzp00BXOU,10919
 airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/amazon/aws/triggers/athena.py,sha256=62ty40zejcm5Y0d1rTQZuYzSjq3hUkmAs0d_zxM_Kjw,2596
@@ -239,7 +239,7 @@ airflow/providers/amazon/aws/triggers/ssm.py,sha256=47brvFXU1DajrccmzD5KuabG_ywo
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
 airflow/providers/amazon/aws/utils/__init__.py,sha256=-Q5XK8ZV7EK6unj_4hlciqztACPuftMjNKMuBA21q84,3178
 airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=KJsYG3qnESxxh2PFWvf83gHKzqEEAE9jBANTMoyRn3A,16435
-airflow/providers/amazon/aws/utils/eks_get_token.py,sha256=
+airflow/providers/amazon/aws/utils/eks_get_token.py,sha256=qVRDEZVrfFjvCgDyXxcGm64z-Ex8D3aCm7I5Yak3qyw,3624
 airflow/providers/amazon/aws/utils/emailer.py,sha256=y-bzg1BZzOQ8J9-ed-74LY3VMv6LrLfBDtw5S4t3Tv4,1855
 airflow/providers/amazon/aws/utils/identifiers.py,sha256=KqkEJ96mz4BYt0wuKX-_DaFk-8Lv9CuDVo-VrlAK29U,1944
 airflow/providers/amazon/aws/utils/mixins.py,sha256=lY2b8n5K-jrJM_TMnDJRm3o6UAH8YRi1KzgCpo2_Ud0,5601
@@ -280,7 +280,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
-apache_airflow_providers_amazon-9.
-apache_airflow_providers_amazon-9.
-apache_airflow_providers_amazon-9.
-apache_airflow_providers_amazon-9.
+apache_airflow_providers_amazon-9.13.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-9.13.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_amazon-9.13.0rc1.dist-info/METADATA,sha256=OVfHjHM7qIdl0h4bNCj2E5T1LLzbVuScP__BDRmr47g,10166
+apache_airflow_providers_amazon-9.13.0rc1.dist-info/RECORD,,

{apache_airflow_providers_amazon-9.12.0rc1.dist-info → apache_airflow_providers_amazon-9.13.0rc1.dist-info}/WHEEL and entry_points.txt: files without changes.