apache-airflow-providers-amazon 8.18.0rc1__py3-none-any.whl → 8.18.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +2 -2
- airflow/providers/amazon/aws/auth_manager/cli/definition.py +14 -0
- airflow/providers/amazon/aws/auth_manager/cli/idc_commands.py +148 -0
- airflow/providers/amazon/aws/hooks/base_aws.py +2 -2
- airflow/providers/amazon/aws/hooks/emr.py +6 -0
- airflow/providers/amazon/aws/hooks/redshift_cluster.py +1 -1
- airflow/providers/amazon/aws/links/emr.py +122 -2
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +2 -2
- airflow/providers/amazon/aws/operators/athena.py +4 -1
- airflow/providers/amazon/aws/operators/batch.py +5 -6
- airflow/providers/amazon/aws/operators/ecs.py +6 -2
- airflow/providers/amazon/aws/operators/eks.py +23 -20
- airflow/providers/amazon/aws/operators/emr.py +192 -26
- airflow/providers/amazon/aws/operators/glue.py +5 -2
- airflow/providers/amazon/aws/operators/glue_crawler.py +5 -2
- airflow/providers/amazon/aws/operators/glue_databrew.py +5 -2
- airflow/providers/amazon/aws/operators/lambda_function.py +3 -0
- airflow/providers/amazon/aws/operators/rds.py +21 -12
- airflow/providers/amazon/aws/operators/redshift_cluster.py +12 -18
- airflow/providers/amazon/aws/operators/redshift_data.py +2 -4
- airflow/providers/amazon/aws/operators/sagemaker.py +24 -20
- airflow/providers/amazon/aws/operators/step_function.py +4 -1
- airflow/providers/amazon/aws/sensors/ec2.py +4 -2
- airflow/providers/amazon/aws/sensors/emr.py +13 -6
- airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +4 -1
- airflow/providers/amazon/aws/sensors/redshift_cluster.py +2 -4
- airflow/providers/amazon/aws/sensors/s3.py +3 -0
- airflow/providers/amazon/aws/sensors/sqs.py +4 -1
- airflow/providers/amazon/aws/utils/__init__.py +10 -0
- airflow/providers/amazon/aws/utils/task_log_fetcher.py +2 -2
- airflow/providers/amazon/get_provider_info.py +4 -0
- {apache_airflow_providers_amazon-8.18.0rc1.dist-info → apache_airflow_providers_amazon-8.18.0rc2.dist-info}/METADATA +2 -2
- {apache_airflow_providers_amazon-8.18.0rc1.dist-info → apache_airflow_providers_amazon-8.18.0rc2.dist-info}/RECORD +35 -34
- {apache_airflow_providers_amazon-8.18.0rc1.dist-info → apache_airflow_providers_amazon-8.18.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.18.0rc1.dist-info → apache_airflow_providers_amazon-8.18.0rc2.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/operators/sagemaker.py

@@ -39,7 +39,7 @@ from airflow.providers.amazon.aws.triggers.sagemaker import (
     SageMakerTrainingPrintLogTrigger,
     SageMakerTrigger,
 )
-from airflow.providers.amazon.aws.utils import trim_none_values
+from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.sagemaker import ApprovalStatus
 from airflow.providers.amazon.aws.utils.tags import format_tags
 from airflow.utils.helpers import prune_dict
@@ -315,11 +315,13 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
         self.serialized_job = serialize(self.hook.describe_processing_job(self.config["ProcessingJobName"]))
         return {"Processing": self.serialized_job}
 
-    def execute_complete(self, context, event=None):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
-
-
+
+        self.log.info(event["message"])
         self.serialized_job = serialize(self.hook.describe_processing_job(self.config["ProcessingJobName"]))
         self.log.info("%s completed successfully.", self.task_id)
         return {"Processing": self.serialized_job}
@@ -566,7 +568,9 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
             "Endpoint": serialize(self.hook.describe_endpoint(endpoint_info["EndpointName"])),
         }
 
-    def execute_complete(self, context, event=None):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
         endpoint_info = self.config.get("Endpoint", self.config)
@@ -749,10 +753,7 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
         return self.serialize_result()
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]:
-        if event is None:
-            err_msg = "Trigger error: event is None"
-            self.log.error(err_msg)
-            raise AirflowException(err_msg)
+        event = validate_execute_complete_event(event)
 
         self.log.info(event["message"])
         return self.serialize_result()
@@ -924,7 +925,9 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
 
         return {"Tuning": serialize(description)}
 
-    def execute_complete(self, context, event=None):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
         return {
@@ -1154,10 +1157,7 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
         return self.serialize_result()
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]:
-        if event is None:
-            err_msg = "Trigger error: event is None"
-            self.log.error(err_msg)
-            raise AirflowException(err_msg)
+        event = validate_execute_complete_event(event)
 
         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
@@ -1296,7 +1296,9 @@ class SageMakerStartPipelineOperator(SageMakerBaseOperator):
         return arn
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
             raise AirflowException(f"Failure during pipeline execution: {event}")
         return event["value"]
 
@@ -1389,12 +1391,14 @@ class SageMakerStopPipelineOperator(SageMakerBaseOperator):
         return status
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
-
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
             raise AirflowException(f"Failure during pipeline execution: {event}")
-
-
-
-
+
+        # theoretically we should do a `describe` call to know this,
+        # but if we reach this point, this is the only possible status
+        return "Stopped"
 
 
 class SageMakerRegisterModelVersionOperator(SageMakerBaseOperator):
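All of the SageMaker hunks above converge on one deferrable-operator pattern: the trigger fires, Airflow calls execute_complete with the trigger's event payload, and the method now funnels that payload through the new validate_execute_complete_event helper before checking its status. A condensed sketch of that pattern, with an illustrative class name rather than code copied from the provider (it assumes the 8.18.0rc2 package is installed so the helper import resolves):

from __future__ import annotations

from typing import Any

from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.utils import validate_execute_complete_event


class ExampleSageMakerStyleOperator:  # hypothetical stand-in for a SageMaker operator
    def execute_complete(self, context, event: dict[str, Any] | None = None) -> dict[str, Any]:
        # Fail fast if the trigger fired without a payload (previously an inlined None-check).
        event = validate_execute_complete_event(event)

        # A well-formed trigger payload always carries a "status" key.
        if event["status"] != "success":
            raise AirflowException(f"Error while running job: {event}")
        return {"Job": event.get("message", "")}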
airflow/providers/amazon/aws/operators/step_function.py

@@ -29,6 +29,7 @@ from airflow.providers.amazon.aws.links.step_function import (
 )
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.step_function import StepFunctionsExecutionCompleteTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 
 if TYPE_CHECKING:
@@ -129,7 +130,9 @@ class StepFunctionStartExecutionOperator(AwsBaseOperator[StepFunctionHook]):
         return execution_arn
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
             raise AirflowException(f"Trigger error: event is {event}")
 
         self.log.info("State Machine execution completed successfully")
airflow/providers/amazon/aws/sensors/ec2.py

@@ -24,6 +24,7 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
 from airflow.providers.amazon.aws.triggers.ec2 import EC2StateSensorTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
@@ -92,11 +93,12 @@ class EC2InstanceStateSensor(BaseSensorOperator):
         self.log.info("instance state: %s", instance_state)
         return instance_state == self.target_state
 
-    def execute_complete(self, context, event=None):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
             message = f"Error: {event}"
             if self.soft_fail:
                 raise AirflowSkipException(message)
             raise AirflowException(message)
-        return
airflow/providers/amazon/aws/sensors/emr.py

@@ -32,6 +32,7 @@ from airflow.providers.amazon.aws.triggers.emr import (
     EmrStepSensorTrigger,
     EmrTerminateJobFlowTrigger,
 )
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
@@ -335,15 +336,17 @@ class EmrContainerSensor(BaseSensorOperator):
             method_name="execute_complete",
         )
 
-    def execute_complete(self, context, event=None):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
             message = f"Error while running job: {event}"
             if self.soft_fail:
                 raise AirflowSkipException(message)
             raise AirflowException(message)
-
-
+
+        self.log.info("Job completed.")
 
 
 class EmrNotebookExecutionSensor(EmrBaseSensor):
@@ -526,7 +529,9 @@ class EmrJobFlowSensor(EmrBaseSensor):
             method_name="execute_complete",
         )
 
-    def execute_complete(self, context: Context, event=None) -> None:
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
             message = f"Error while running job: {event}"
@@ -657,7 +662,9 @@ class EmrStepSensor(EmrBaseSensor):
             method_name="execute_complete",
         )
 
-    def execute_complete(self, context, event=None):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        event = validate_execute_complete_event(event)
+
         if event["status"] != "success":
             # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
             message = f"Error while running job: {event}"
@@ -665,4 +672,4 @@ class EmrStepSensor(EmrBaseSensor):
             raise AirflowSkipException(message)
         raise AirflowException(message)
 
-        self.log.info("Job completed.")
+        self.log.info("Job %s completed.", self.job_flow_id)
airflow/providers/amazon/aws/sensors/glue_catalog_partition.py

@@ -27,6 +27,7 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
 from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
 from airflow.providers.amazon.aws.triggers.glue import GlueCatalogPartitionTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
@@ -111,7 +112,9 @@ class GlueCatalogPartitionSensor(BaseSensorOperator):
         return self.hook.check_for_partition(self.database_name, self.table_name, self.expression)
 
     def execute_complete(self, context: Context, event: dict | None = None) -> None:
-
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
             message = f"Trigger error: event is {event}"
             if self.soft_fail:
airflow/providers/amazon/aws/sensors/redshift_cluster.py

@@ -26,6 +26,7 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
 from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftClusterTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
@@ -88,10 +89,7 @@ class RedshiftClusterSensor(BaseSensorOperator):
         )
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        if event is None:
-            err_msg = "Trigger error: event is None"
-            self.log.error(err_msg)
-            raise AirflowException(err_msg)
+        event = validate_execute_complete_event(event)
 
         status = event["status"]
         if status == "error":
airflow/providers/amazon/aws/sensors/s3.py

@@ -27,6 +27,7 @@ from typing import TYPE_CHECKING, Any, Callable, Sequence, cast
 from deprecated import deprecated
 
 from airflow.configuration import conf
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -371,6 +372,8 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
 
         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
         """
+        event = validate_execute_complete_event(event)
+
         if event and event["status"] == "error":
             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
             if self.soft_fail:
airflow/providers/amazon/aws/sensors/sqs.py

@@ -28,6 +28,7 @@ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType, process_response
 
@@ -155,7 +156,9 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
         super().execute(context=context)
 
     def execute_complete(self, context: Context, event: dict | None = None) -> None:
-
+        event = validate_execute_complete_event(event)
+
+        if event["status"] != "success":
             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
             message = f"Trigger error: event is {event}"
             if self.soft_fail:
airflow/providers/amazon/aws/utils/__init__.py

@@ -20,7 +20,9 @@ import logging
 import re
 from datetime import datetime, timezone
 from enum import Enum
+from typing import Any
 
+from airflow.exceptions import AirflowException
 from airflow.utils.helpers import prune_dict
 from airflow.version import version
 
@@ -72,6 +74,14 @@ def get_airflow_version() -> tuple[int, ...]:
     return tuple(int(x) for x in match.groups())
 
 
+def validate_execute_complete_event(event: dict[str, Any] | None = None) -> dict[str, Any]:
+    if event is None:
+        err_msg = "Trigger error: event is None"
+        log.error(err_msg)
+        raise AirflowException(err_msg)
+    return event
+
+
 class _StringCompareEnum(Enum):
     """
     An Enum class which can be compared with regular `str` and subclasses.
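The helper added here centralizes the None-event guard that several operators and sensors previously inlined. A minimal behavioral sketch, kept standalone by mirroring the function body above (with a stand-in exception class) instead of importing the provider:

import logging
from typing import Any

log = logging.getLogger(__name__)


class AirflowException(Exception):  # stand-in for airflow.exceptions.AirflowException
    pass


def validate_execute_complete_event(event: dict[str, Any] | None = None) -> dict[str, Any]:
    # Same shape as the provider function: reject a missing trigger event, pass a real one through.
    if event is None:
        err_msg = "Trigger error: event is None"
        log.error(err_msg)
        raise AirflowException(err_msg)
    return event


# A real trigger payload comes back unchanged...
assert validate_execute_complete_event({"status": "success"}) == {"status": "success"}

# ...while a missing payload fails fast instead of surfacing a KeyError later.
try:
    validate_execute_complete_event(None)
except AirflowException as exc:
    print(exc)  # Trigger error: event is None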
airflow/providers/amazon/aws/utils/task_log_fetcher.py

@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import time
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from threading import Event, Thread
 from typing import TYPE_CHECKING, Generator
 
@@ -87,7 +87,7 @@ class AwsTaskLogFetcher(Thread):
 
     @staticmethod
     def event_to_str(event: dict) -> str:
-        event_dt = datetime.
+        event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
         formatted_event_dt = event_dt.strftime("%Y-%m-%d %H:%M:%S,%f")[:-3]
         message = event["message"]
         return f"[{formatted_event_dt}] {message}"
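The event_to_str change makes the timestamp conversion explicitly timezone-aware: CloudWatch Logs events carry a millisecond epoch timestamp, so it is divided by 1000 and rendered as UTC rather than local time. A standalone sketch of the same conversion (the sample event values are made up):

from datetime import datetime, timezone

# A CloudWatch Logs event: "timestamp" is milliseconds since the epoch.
event = {"timestamp": 1_700_000_000_123, "message": "task started"}

# Convert milliseconds to seconds and attach UTC explicitly.
event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
formatted_event_dt = event_dt.strftime("%Y-%m-%d %H:%M:%S,%f")[:-3]  # trim microseconds to milliseconds

print(f"[{formatted_event_dt}] {event['message']}")  # e.g. [2023-11-14 22:13:20,123] task started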
airflow/providers/amazon/get_provider_info.py

@@ -956,6 +956,10 @@ def get_provider_info():
             "airflow.providers.amazon.aws.links.batch.BatchJobQueueLink",
             "airflow.providers.amazon.aws.links.emr.EmrClusterLink",
             "airflow.providers.amazon.aws.links.emr.EmrLogsLink",
+            "airflow.providers.amazon.aws.links.emr.EmrServerlessCloudWatchLogsLink",
+            "airflow.providers.amazon.aws.links.emr.EmrServerlessDashboardLink",
+            "airflow.providers.amazon.aws.links.emr.EmrServerlessLogsLink",
+            "airflow.providers.amazon.aws.links.emr.EmrServerlessS3LogsLink",
             "airflow.providers.amazon.aws.links.glue.GlueJobRunDetailsLink",
             "airflow.providers.amazon.aws.links.logs.CloudWatchEventsLink",
             "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink",
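These four entries register the new EMR Serverless link classes under the provider's extra-links, which is how the webserver can rebuild the console buttons from a serialized DAG. A generic, hypothetical sketch of the mechanism those entries feed into; the class name and URL below are invented, and the real implementations live in airflow/providers/amazon/aws/links/emr.py:

from __future__ import annotations

from airflow.models.baseoperator import BaseOperator, BaseOperatorLink


class ExampleDashboardLink(BaseOperatorLink):
    """Illustrative link class, not one of the provider's EMR Serverless links."""

    name = "Example Dashboard"

    def get_link(self, operator: BaseOperator, *, ti_key) -> str:
        # Real AWS link classes typically rebuild the console URL from values the operator
        # persisted during execute(); a constant keeps this sketch self-contained.
        return "https://console.example.com/dashboard"


class ExampleOperator(BaseOperator):
    # Attaching the link here adds the button to the task instance UI; listing the class
    # path under "extra-links" (as the hunk above does) is what lets Airflow reconstruct
    # the link from a serialized DAG.
    operator_extra_links = (ExampleDashboardLink(),)

    def execute(self, context):
        return None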
{apache_airflow_providers_amazon-8.18.0rc1.dist-info → apache_airflow_providers_amazon-8.18.0rc2.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.18.0rc1
+Version: 8.18.0rc2
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -118,7 +118,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.18.0.rc1``
+Release: ``8.18.0.rc2``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
{apache_airflow_providers_amazon-8.18.0rc1.dist-info → apache_airflow_providers_amazon-8.18.0rc2.dist-info}/RECORD

@@ -1,6 +1,6 @@
 airflow/providers/amazon/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
 airflow/providers/amazon/__init__.py,sha256=cYFbjuffLPNrzfJibpVuDHG0IKPXJrKvzhix1PndlSg,1582
-airflow/providers/amazon/get_provider_info.py,sha256=
+airflow/providers/amazon/get_provider_info.py,sha256=ErLNC_IT5zjMe_OOIrJOPZzCVLjfvmn9GSR4svcwmOE,58934
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=UVoxpfQEdWI1319h0U78Z_r5wRFQL6DN14hJw3G1Rgo,1731
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -11,8 +11,9 @@ airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZ
 airflow/providers/amazon/aws/auth_manager/avp/entities.py,sha256=Jpk_jKHTveBVn8etfHl5jr9Q1MLvrLzlEGbYE_h4NVM,1925
 airflow/providers/amazon/aws/auth_manager/avp/facade.py,sha256=4ATi_el62uvJKbaaBurpo-prz7ifz9uMVx_8x_LKgII,5465
 airflow/providers/amazon/aws/auth_manager/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=
-airflow/providers/amazon/aws/auth_manager/cli/definition.py,sha256=
+airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=GUM3kywjL6BtQZrnr7Axoph3E9ghnmD1ANsWenq6hUk,6354
+airflow/providers/amazon/aws/auth_manager/cli/definition.py,sha256=tw3Y6i4WejE4bUlTqSv_EWYj6WIzYqktxRQFnApwqG0,2749
+airflow/providers/amazon/aws/auth_manager/cli/idc_commands.py,sha256=NV_95_vQxoZYRAM3PSb3V5QITw6XbK9a13f3opExljE,5836
 airflow/providers/amazon/aws/auth_manager/cli/schema.json,sha256=j3_Krq0M1ClYMh_gHuJEQJ3nXMDjEbzI-SNl_U_KFIc,5401
 airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py,sha256=5XLvMpdpNpTYxntN2xL1-XfuxevQzz5EYHJb4TBOLFI,1566
@@ -33,7 +34,7 @@ airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/amazon/aws/hooks/appflow.py,sha256=U3kw4TUCopti1bAaTuArHnkSJlPfksN_c6mGdV8e64w,5257
 airflow/providers/amazon/aws/hooks/athena.py,sha256=ylbpvq8boc3-EVo9ZW5E164rr2NLPpOMEIhKQo_x0SM,13566
 airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=GDQ2VM0ECs3wRBPYPLB3yJ4EfKecyJCUfc6X3cnScR0,6839
-airflow/providers/amazon/aws/hooks/base_aws.py,sha256=
+airflow/providers/amazon/aws/hooks/base_aws.py,sha256=inQw6cnuHd8BVXTzeF_yXYVjkBG9Wx4-1HDqhcRJnfk,49223
 airflow/providers/amazon/aws/hooks/batch_client.py,sha256=Mc_SsRw3lylbCW1nPyO_N0-9W8Ar3YsMypkcl1uvmW4,21345
 airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
 airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=ikOprhU23yvUTQv03zXspZtQt-D--g0VMjvZaetjdP0,10578
@@ -47,7 +48,7 @@ airflow/providers/amazon/aws/hooks/ecr.py,sha256=AA70VFsoixztaqCpL_kiJEf-lXC6BWg
 airflow/providers/amazon/aws/hooks/ecs.py,sha256=p_7qHGZbZWY-WHJEJD186o6mP5HBXmUfCJZ7usdydO0,6666
 airflow/providers/amazon/aws/hooks/eks.py,sha256=0nVjYryhhFrUe7cQ6ALOKn3V68J4qHtx97nm99G9JRE,24439
 airflow/providers/amazon/aws/hooks/elasticache_replication_group.py,sha256=x6kkaR2nzDF8w1kqolbaS3-XCbHl5qlJMcpGYmlsxuU,12089
-airflow/providers/amazon/aws/hooks/emr.py,sha256
+airflow/providers/amazon/aws/hooks/emr.py,sha256=EK99rlcx_qKjC20rmyTVxOAyE7mXkCQDX6a73ZPJMHQ,21380
 airflow/providers/amazon/aws/hooks/eventbridge.py,sha256=dSaKbFB8ueOUJGl6YLIz70zXy0Xzr3yMflKS2wGFDSM,3364
 airflow/providers/amazon/aws/hooks/glacier.py,sha256=BTDavN3NUwWe1hBCLWYWhmpliuwybEByMGtWnYPRL5Q,3463
 airflow/providers/amazon/aws/hooks/glue.py,sha256=D7XR2EEUfWob5ebg5oldp8Vv-uYt5nEPCAj6W-KaGvo,16819
@@ -60,7 +61,7 @@ airflow/providers/amazon/aws/hooks/logs.py,sha256=pS987tewWr8HY7KTzIhImoYpdaconH
 airflow/providers/amazon/aws/hooks/neptune.py,sha256=IvQjAdtdDKWwHmfs-t7YG516p6Y3xHe453WgU0C47j0,3264
 airflow/providers/amazon/aws/hooks/quicksight.py,sha256=MFTlrWV88wLky2swo-b5fFQDLbMQCw6w6lcgAvJqveU,7957
 airflow/providers/amazon/aws/hooks/rds.py,sha256=FxJYUL7C_5gnKc7xUr-JTRLKBbb0osYx1ttyVjWCFrw,15141
-airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=
+airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=wpgoCZ5bENxK-gvcejA1EbcGz20EEXbl7QDUMJ5buWc,13015
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=I6XEwAzHmBYx_JS0r2zPqJ8cqffboqO3IcHgE1NugQ0,10229
 airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=17W1q0qGJDmVpxJbin67pnhV6bpz-W7J1KXlehv67wQ,11001
 airflow/providers/amazon/aws/hooks/s3.py,sha256=L21XRnPv4tCbM2V09vXL45mOaZCjc-mLJyDGVm1dY4E,59404
@@ -77,12 +78,12 @@ airflow/providers/amazon/aws/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/amazon/aws/links/athena.py,sha256=bf__mn0W1H0x1EVJBFevf-ap1GDpuiVDuhw4mnEkr9k,1235
 airflow/providers/amazon/aws/links/base_aws.py,sha256=OATai8djxCbxb1VK1KHku4UJmduZFriuC1Az2K1gLKw,3070
 airflow/providers/amazon/aws/links/batch.py,sha256=-bnVCTEjgXrLOxvdz2mwmG0wIVAIzlaqvKMq0dJyxqM,1770
-airflow/providers/amazon/aws/links/emr.py,sha256=
+airflow/providers/amazon/aws/links/emr.py,sha256=mre28eNDW7uo_4xaLPcNHikjvBRN4zbJjxgwGLSfr7c,7055
 airflow/providers/amazon/aws/links/glue.py,sha256=NrXZ-jKmX3jNxGngBruudRcm7vgsxZt8CApAyC2ulKI,1229
 airflow/providers/amazon/aws/links/logs.py,sha256=BgRd61V_IvZpnZLShRN6zDqeoxjXC4M-6sfSgL0TGpM,1608
 airflow/providers/amazon/aws/links/step_function.py,sha256=xSL4vfKLnCn-QboRtruajpH5elRrNfw0XkY7eSfPpE4,2099
 airflow/providers/amazon/aws/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=
+airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=CMjD5z_HPCs_wkZ_dSB1JtnLb30s6OdTjrF4amguqqw,6950
 airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=n361LFeRY_2-OVZiwjvvCbt03cmO-Ts7cPeoiN6kiXE,8467
 airflow/providers/amazon/aws/notifications/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/notifications/chime.py,sha256=QCEdvVO7oCIbf_rGHtQZeitAUKARgsgtKjokxYb_kB0,2122
@@ -90,32 +91,32 @@ airflow/providers/amazon/aws/notifications/sns.py,sha256=VCh3MpKl86RBBmI2zw0eyHG
 airflow/providers/amazon/aws/notifications/sqs.py,sha256=ixxkxonDvSk_x3BRAlFAQe771tGlwFli2JzVx-2f8gg,3591
 airflow/providers/amazon/aws/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/operators/appflow.py,sha256=bE7vOIDRNoPrIRwIdZsgZpqyLFr98goulRKA9_KsIJc,20634
-airflow/providers/amazon/aws/operators/athena.py,sha256=
+airflow/providers/amazon/aws/operators/athena.py,sha256=qR9qmGlGG34qvEawm1EgKboR3WowMLd1eYknVUbbi6g,14158
 airflow/providers/amazon/aws/operators/base_aws.py,sha256=yWKIeV5I0asE3GVBFFM-mVxEhi2u4HpDtlWzaZ8eIvk,3636
-airflow/providers/amazon/aws/operators/batch.py,sha256=
+airflow/providers/amazon/aws/operators/batch.py,sha256=naqUGgWV9YkhoNALf5cSI-cHE0oiYzgTMOBUJDwekrM,22374
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=_pBd-p-fvB9_Suge4Z88mFhsXHSSa-gHneEi4PjWBiU,5058
 airflow/providers/amazon/aws/operators/datasync.py,sha256=SVmBADZF13VhAx0SX5ajVwdFDTIpMDsODaVDZ586Ago,18919
 airflow/providers/amazon/aws/operators/dms.py,sha256=6nd7Is7FMdq-xv7EeqZznNm8xdmo4eVVcw_031EPqKU,12299
 airflow/providers/amazon/aws/operators/ec2.py,sha256=7gGORwYTgQ4WA83Ty_TrYSmXpp_-vVXW5tDwddzCgSA,15233
-airflow/providers/amazon/aws/operators/ecs.py,sha256=
-airflow/providers/amazon/aws/operators/eks.py,sha256=
-airflow/providers/amazon/aws/operators/emr.py,sha256=
+airflow/providers/amazon/aws/operators/ecs.py,sha256=b7yi4cHnMtjxd3BGxAEd1hCQZkSKsdhk3T1kwhxKw-w,32739
+airflow/providers/amazon/aws/operators/eks.py,sha256=x7PAxd0Ua3avKQf1SPKIeP7RboPcnHDSn94eTcvnIvI,50552
+airflow/providers/amazon/aws/operators/emr.py,sha256=SrfypUWxD3Y0qr7SdhDuKZzTBh4GBYfczwQG14O4iMw,83592
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=e686XFhVi54DbaCk7oVc0fhvH6GIPU3p8jgyCie1yBU,10394
 airflow/providers/amazon/aws/operators/glacier.py,sha256=zxwC6lLk6sWerjlogXq6HgNOJx4h0hkqpGpqn23hJWk,3654
-airflow/providers/amazon/aws/operators/glue.py,sha256=
-airflow/providers/amazon/aws/operators/glue_crawler.py,sha256=
-airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=
-airflow/providers/amazon/aws/operators/lambda_function.py,sha256=
+airflow/providers/amazon/aws/operators/glue.py,sha256=m3UH__7jzq3zE4pAFQ4kPZdqz3irn6XlmknFldMTOfo,10141
+airflow/providers/amazon/aws/operators/glue_crawler.py,sha256=GGjreSnpWPP-CRvL5d1LNuQAV6PwwXhKY2hhfwnHm-A,4539
+airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=Ot7qdXIhJ4AYBPeYkvJLjyOrrNVj_anFXcuJHKelh9A,4468
+airflow/providers/amazon/aws/operators/lambda_function.py,sha256=96KtK5KUpMPW2i8Xay1UdKPMX211hS6FqweFnRNuTFQ,10619
 airflow/providers/amazon/aws/operators/neptune.py,sha256=aBfNhB_tnYwyfeqz7AR3c_H2VETMn7jQL_g-rCTczOU,9644
 airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswRzaHaK8h0ACLY99i_1Prtq10,4089
-airflow/providers/amazon/aws/operators/rds.py,sha256=
-airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=
-airflow/providers/amazon/aws/operators/redshift_data.py,sha256=
+airflow/providers/amazon/aws/operators/rds.py,sha256=oLhzlf7LIaudu2B41WsK9Svh4EsFayBeRKsmsUETdFE,39441
+airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=IuDijhu5JQWEphWh7IK2f03YnAI8da2GoDq_rdp5sFs,35018
+airflow/providers/amazon/aws/operators/redshift_data.py,sha256=wK-vTDcn0MqOuF9e-71JYIEkLKihah6oGU-p_8VT2HI,8612
 airflow/providers/amazon/aws/operators/s3.py,sha256=RRZs_qZEeedZWGppzFoqa5Yt_T6bQU-JXKXbzf8cfBE,34074
-airflow/providers/amazon/aws/operators/sagemaker.py,sha256=
+airflow/providers/amazon/aws/operators/sagemaker.py,sha256=l8ZoUmnW2T4WSaJEaL8Md3MAY4I18QvIuM1cFtwyH2Y,80807
 airflow/providers/amazon/aws/operators/sns.py,sha256=Mz-3jDOd6GfvA-1kJ0ZSxChbyYa-fGTXNhbkimye0lw,3729
 airflow/providers/amazon/aws/operators/sqs.py,sha256=UlURQfXaueF4jF_eKmonrBOH8Op-PMzvxLSrrAizR-o,4326
-airflow/providers/amazon/aws/operators/step_function.py,sha256=
+airflow/providers/amazon/aws/operators/step_function.py,sha256=vybKlc7N9udtw9R7a8n1lU1pWF7lai9zn8Csk7k3BBQ,8846
 airflow/providers/amazon/aws/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/secrets/secrets_manager.py,sha256=Hf00-vqAH57wGD82X8W9SWFbu1rcx8mwV7KnrH80ZeQ,15540
 airflow/providers/amazon/aws/secrets/systems_manager.py,sha256=LZGygsO9grto9KggfuHXDUjLEPQMG6ZD_dEHeDvEJpk,8495
@@ -126,21 +127,21 @@ airflow/providers/amazon/aws/sensors/batch.py,sha256=rSnVh_Yo_y3um2sUDnip_Ftmwg1
 airflow/providers/amazon/aws/sensors/cloud_formation.py,sha256=AUR2wTs4_2ejrJkbnK2lv7KIHlFRPFmGg98ldnps7pg,5418
 airflow/providers/amazon/aws/sensors/dms.py,sha256=SzKUgEtL0agejWKWJvjqmrJdtwbd2vJriHcCsYV0j8s,5744
 airflow/providers/amazon/aws/sensors/dynamodb.py,sha256=P43g73ACBoDo0Lrxzm9wOoEepbRVAOjqtwWYvIq1Tls,5008
-airflow/providers/amazon/aws/sensors/ec2.py,sha256=
+airflow/providers/amazon/aws/sensors/ec2.py,sha256=jAiZXontV61z7fO5OBgH67Cg1nqQxrzdPpGqvzRxqVs,4112
 airflow/providers/amazon/aws/sensors/ecs.py,sha256=Bju2xJHNI8SdddD1muDcqtihL__EAHQwa-RtYxPjfoI,7087
 airflow/providers/amazon/aws/sensors/eks.py,sha256=eDRxJ73fvb7UzzahwwldogMjkjkB5s94DQJ3yJxrPy8,9839
-airflow/providers/amazon/aws/sensors/emr.py,sha256=
+airflow/providers/amazon/aws/sensors/emr.py,sha256=oY21IzmWvxQRVkzrF6dXlgP0dsdIfjpUTOXQjfV3iMs,24792
 airflow/providers/amazon/aws/sensors/glacier.py,sha256=2UUI-y-x07DH8I5OikA_d5_FHCMQjBpxMKxRvlZSlS4,4282
 airflow/providers/amazon/aws/sensors/glue.py,sha256=AjMUWDQftq1samkXmvZx42Htc3aD-K6U9D1OwUNqIoE,3540
-airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=
+airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=iKz0VNcSqef_4WQn6HLTLbV4qdrnQ7Pv4G7iS3hFyE4,5603
 airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=ERdrXtQvN2KgkaBA4hixXDsljtXN88QIPHXOcVn_Cb4,3274
 airflow/providers/amazon/aws/sensors/lambda_function.py,sha256=6eP4OzU_YcoAmNYTpCkHogJiOps7lraMoeYibeRls5E,3190
 airflow/providers/amazon/aws/sensors/quicksight.py,sha256=_jw5455fWYAttuLl63uDmzt9EYU1FjaRvXtG_S_1CUE,4625
 airflow/providers/amazon/aws/sensors/rds.py,sha256=4mLAFLjY8yI3bExVdGATXIIiiuwZyQUtGM9qKwzjuew,6434
-airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=
-airflow/providers/amazon/aws/sensors/s3.py,sha256=
+airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=qoI7cWtCB1Haza4wr7OAlxGISLzEO-pClKoYz9320C0,4526
+airflow/providers/amazon/aws/sensors/s3.py,sha256=GefLX6l7K5Qw3O4njpXlWz2PhIU6ACelDamaH6ZY4xE,16182
 airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=YY5uFcvia_JpjG-Ic-0nfwTsTkYC43XkGT-DrL7cHPs,12977
-airflow/providers/amazon/aws/sensors/sqs.py,sha256=
+airflow/providers/amazon/aws/sensors/sqs.py,sha256=GVaakDD7nCcDQfd5WIxdlewNWH5PAlt4N9phEs8Eowg,11328
 airflow/providers/amazon/aws/sensors/step_function.py,sha256=pqAtBJd3m003qvaJwr4BrKBHhYWGrJ67yaqczjcE1_w,4089
 airflow/providers/amazon/aws/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py,sha256=j7PCHYRIQhHLHF2yQOYt2keGMp84VbLp9QV7blf6IQs,7016
@@ -185,7 +186,7 @@ airflow/providers/amazon/aws/triggers/s3.py,sha256=3UJmYIHujd4YYnBh7fj91uqWDsUGc
 airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=D4_8pJNPf4n7zOff4jHgjhvblFYFQEA-t5a5_Y79X7M,11448
 airflow/providers/amazon/aws/triggers/sqs.py,sha256=tVA1i8XzV5AqbVQAdWrgrLKoZF8ewqgEwV7ggk1hrQM,8257
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
-airflow/providers/amazon/aws/utils/__init__.py,sha256=
+airflow/providers/amazon/aws/utils/__init__.py,sha256=J9uY4Ywc_4psxr1e69RZJDvzRPRSxYkkjkbtc_DQJnU,3623
 airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=OzKUJjBlZQaGWN4E2Z58e0GUkI6ICg071zxiMdIzpNY,22799
 airflow/providers/amazon/aws/utils/eks_get_token.py,sha256=q4utFF2c02T2Lm6KIZLABOiXJeglVZKCOxq6gn14dsk,2342
 airflow/providers/amazon/aws/utils/emailer.py,sha256=fHzzErXFs74KP6VXJrCc-1T0K1srXf0vcPRqyf5i6Y8,1854
@@ -197,7 +198,7 @@ airflow/providers/amazon/aws/utils/sagemaker.py,sha256=893W8DBPhsyPINbFph9MKKP4O
 airflow/providers/amazon/aws/utils/sqs.py,sha256=s97MhAX-6pWdxkrpFfknaIDvL2QzYr411J9l4pL_no8,3493
 airflow/providers/amazon/aws/utils/suppress.py,sha256=5jFviuoFOJ0L3vBKI0qoCSgpVxMxUMgAeXPsQ1Iyq80,2360
 airflow/providers/amazon/aws/utils/tags.py,sha256=-WPb4MpzZxV4MHS6OD09EronbR_jlfuVQeEqu4cVnj0,1762
-airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=
+airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=bOF0WriEfnCUu0jpeZXfSPW_couD3aefiOTTJF5apsQ,4552
 airflow/providers/amazon/aws/utils/waiter.py,sha256=FO1WupdK7Z9AonrC8w_XcRpQE7A-o4VlgaqQxV65dbk,3509
 airflow/providers/amazon/aws/utils/waiter_with_logging.py,sha256=Y2yKAy6v64kj4miDvC5bcK0jP8GDyWDzy-jUaI9ONMM,5892
 airflow/providers/amazon/aws/waiters/README.md,sha256=ftfKyOH1Rqxa77DyLHkqRF1IltQty3uczLXWX7ekE0A,4535
@@ -218,7 +219,7 @@ airflow/providers/amazon/aws/waiters/neptune.json,sha256=4IP0FPqdItVmyP_au9hxpMT
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.18.
-apache_airflow_providers_amazon-8.18.
-apache_airflow_providers_amazon-8.18.
-apache_airflow_providers_amazon-8.18.
+apache_airflow_providers_amazon-8.18.0rc2.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.18.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.18.0rc2.dist-info/METADATA,sha256=rS3Kw_O0UvZmHB1feQAd55-QTOsr7F-nXUfSWGVOUsI,10001
+apache_airflow_providers_amazon-8.18.0rc2.dist-info/RECORD,,
|