apache-airflow-providers-amazon 9.1.0__py3-none-any.whl → 9.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries. In the hunks below, lines prefixed with "-" come from the 9.1.0 wheel and lines prefixed with "+" come from the 9.1.0rc1 wheel.
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +1 -12
- airflow/providers/amazon/aws/hooks/appflow.py +4 -4
- airflow/providers/amazon/aws/hooks/athena.py +15 -25
- airflow/providers/amazon/aws/hooks/eks.py +2 -2
- airflow/providers/amazon/aws/hooks/glue.py +1 -5
- airflow/providers/amazon/aws/hooks/s3.py +2 -2
- airflow/providers/amazon/aws/operators/appflow.py +1 -1
- airflow/providers/amazon/aws/operators/athena.py +1 -3
- airflow/providers/amazon/aws/operators/comprehend.py +3 -3
- airflow/providers/amazon/aws/operators/dms.py +3 -3
- airflow/providers/amazon/aws/operators/ecs.py +3 -11
- airflow/providers/amazon/aws/operators/eks.py +2 -4
- airflow/providers/amazon/aws/operators/glue.py +1 -10
- airflow/providers/amazon/aws/operators/kinesis_analytics.py +3 -3
- airflow/providers/amazon/aws/operators/sagemaker.py +2 -2
- airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -1
- airflow/providers/amazon/aws/triggers/athena.py +2 -1
- airflow/providers/amazon/aws/triggers/glue.py +1 -1
- airflow/providers/amazon/get_provider_info.py +2 -2
- {apache_airflow_providers_amazon-9.1.0.dist-info → apache_airflow_providers_amazon-9.1.0rc1.dist-info}/METADATA +17 -17
- {apache_airflow_providers_amazon-9.1.0.dist-info → apache_airflow_providers_amazon-9.1.0rc1.dist-info}/RECORD +23 -23
- {apache_airflow_providers_amazon-9.1.0.dist-info → apache_airflow_providers_amazon-9.1.0rc1.dist-info}/WHEEL +1 -1
- {apache_airflow_providers_amazon-9.1.0.dist-info → apache_airflow_providers_amazon-9.1.0rc1.dist-info}/entry_points.txt +0 -0
--- a/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
+++ b/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 import argparse
-import warnings
 from collections import defaultdict
 from functools import cached_property
 from typing import TYPE_CHECKING, Container, Sequence, cast
@@ -25,7 +24,7 @@ from typing import TYPE_CHECKING, Container, Sequence, cast
 from flask import session, url_for
 
 from airflow.cli.cli_config import CLICommand, DefaultHelpParser, GroupCommand
-from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
 from airflow.providers.amazon.aws.auth_manager.avp.facade import (
     AwsAuthManagerAmazonVerifiedPermissionsFacade,
@@ -167,16 +166,6 @@ class AwsAuthManager(BaseAuthManager):
             method=method, entity_type=AvpEntities.ASSET, user=user or self.get_user(), entity_id=asset_uri
         )
 
-    def is_authorized_dataset(
-        self, *, method: ResourceMethod, details: AssetDetails | None = None, user: BaseUser | None = None
-    ) -> bool:
-        warnings.warn(
-            "is_authorized_dataset will be renamed as is_authorized_asset in Airflow 3 and will be removed when the minimum Airflow version is set to 3.0 for the amazon provider",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
-        return self.is_authorized_asset(method=method, user=user)
-
     def is_authorized_pool(
         self, *, method: ResourceMethod, details: PoolDetails | None = None, user: BaseUser | None = None
     ) -> bool:
--- a/airflow/providers/amazon/aws/hooks/appflow.py
+++ b/airflow/providers/amazon/aws/hooks/appflow.py
@@ -117,9 +117,9 @@ class AppflowHook(AwsGenericHook["AppflowClient"]):
 
         self.conn.update_flow(
             flowName=response["flowName"],
-            destinationFlowConfigList=response["destinationFlowConfigList"],
-            sourceFlowConfig=response["sourceFlowConfig"],
-            triggerConfig=response["triggerConfig"],
+            destinationFlowConfigList=response["destinationFlowConfigList"],  # type: ignore[arg-type]
+            sourceFlowConfig=response["sourceFlowConfig"],  # type: ignore[arg-type]
+            triggerConfig=response["triggerConfig"],  # type: ignore[arg-type]
             description=response.get("description", "Flow description."),
-            tasks=tasks,
+            tasks=tasks,  # type: ignore[arg-type]
         )
--- a/airflow/providers/amazon/aws/hooks/athena.py
+++ b/airflow/providers/amazon/aws/hooks/athena.py
@@ -155,15 +155,14 @@ class AthenaHook(AwsBaseHook):
         state = None
         try:
             state = response["QueryExecution"]["Status"]["State"]
-        except Exception as e:
-            # The error is being absorbed here and is being handled by the caller.
-            # The error is being absorbed to implement retries.
+        except Exception:
             self.log.exception(
-                "Exception while getting query state. Query execution id: %s, Exception: %s",
-                query_execution_id,
-                e,
+                "Exception while getting query state. Query execution id: %s", query_execution_id
             )
-        return state
+        finally:
+            # The error is being absorbed here and is being handled by the caller.
+            # The error is being absorbed to implement retries.
+            return state
 
     def get_state_change_reason(self, query_execution_id: str, use_cache: bool = False) -> str | None:
         """
@@ -178,15 +177,15 @@ class AthenaHook(AwsBaseHook):
         reason = None
         try:
             reason = response["QueryExecution"]["Status"]["StateChangeReason"]
-        except Exception as e:
-            # The error is being absorbed here and is being handled by the caller.
-            # The error is being absorbed to implement retries.
+        except Exception:
             self.log.exception(
-                "Exception while getting query state change reason. Query execution id: %s, Exception: %s",
+                "Exception while getting query state change reason. Query execution id: %s",
                 query_execution_id,
-                e,
             )
-        return reason
+        finally:
+            # The error is being absorbed here and is being handled by the caller.
+            # The error is being absorbed to implement retries.
+            return reason
 
     def get_query_results(
         self, query_execution_id: str, next_token_id: str | None = None, max_results: int = 1000
@@ -288,18 +287,9 @@ class AthenaHook(AwsBaseHook):
             )
         except AirflowException as error:
             # this function does not raise errors to keep previous behavior.
-            self.log.warning(
-                "AirflowException while polling query status. Query execution id: %s, Exception: %s",
-                query_execution_id,
-                error,
-            )
-        except Exception as e:
-            self.log.warning(
-                "Unexpected exception while polling query status. Query execution id: %s, Exception: %s",
-                query_execution_id,
-                e,
-            )
-        return self.check_query_status(query_execution_id)
+            self.log.warning(error)
+        finally:
+            return self.check_query_status(query_execution_id)
 
     def get_output_location(self, query_execution_id: str) -> str:
         """
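Note: the substantive change across these three athena.py hunks is where the
return sits relative to finally. A return inside a finally block (the 9.1.0rc1
shape, flagged by flake8-bugbear rule B012) silently discards any exception
still in flight, including BaseExceptions such as KeyboardInterrupt that no
except clause handles; 9.1.0 moves the return out and logs through explicit
handlers instead. A minimal sketch of the difference, not provider code:

    def finally_return() -> str | None:
        try:
            raise KeyboardInterrupt  # nothing below catches this
        finally:
            return None  # B012: the interrupt is silently swallowed

    def plain_return() -> str | None:
        state = None
        try:
            raise KeyboardInterrupt
        except Exception:
            state = None  # KeyboardInterrupt is not an Exception; handler skipped
        return state  # never reached: the interrupt propagates to the caller

    print(finally_return())  # prints None, no traceback
    # plain_return() would raise KeyboardInterrupt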
--- a/airflow/providers/amazon/aws/hooks/eks.py
+++ b/airflow/providers/amazon/aws/hooks/eks.py
@@ -85,8 +85,8 @@ COMMAND = """
                 exit 1
             fi
 
-            expiration_timestamp=$(echo "$output" | grep -oP 'expirationTimestamp
-            token=$(echo "$output" | grep -oP 'token
+            expiration_timestamp=$(echo "$output" | grep -oP 'expirationTimestamp:\s*\K[^,]+')
+            token=$(echo "$output" | grep -oP 'token:\s*\K[^,]+')
 
             json_string=$(printf '{{"kind": "ExecCredential","apiVersion": \
             "client.authentication.k8s.io/v1alpha1","spec": {{}},"status": \
--- a/airflow/providers/amazon/aws/hooks/glue.py
+++ b/airflow/providers/amazon/aws/hooks/glue.py
@@ -282,16 +282,13 @@ class GlueJobHook(AwsBaseHook):
                 log_group_error, continuation_tokens.error_stream_continuation
             )
 
-    def job_completion(
-        self, job_name: str, run_id: str, verbose: bool = False, sleep_before_return: int = 0
-    ) -> dict[str, str]:
+    def job_completion(self, job_name: str, run_id: str, verbose: bool = False) -> dict[str, str]:
         """
         Wait until Glue job with job_name finishes; return final state if finished or raises AirflowException.
 
         :param job_name: unique job name per AWS account
         :param run_id: The job-run ID of the predecessor job run
         :param verbose: If True, more Glue Job Run logs show in the Airflow Task Logs. (default: False)
-        :param sleep_before_return: time in seconds to wait before returning final status.
         :return: Dict of JobRunState and JobRunId
         """
         next_log_tokens = self.LogContinuationTokens()
@@ -299,7 +296,6 @@ class GlueJobHook(AwsBaseHook):
             job_run_state = self.get_job_state(job_name, run_id)
             ret = self._handle_state(job_run_state, job_name, run_id, verbose, next_log_tokens)
             if ret:
-                time.sleep(sleep_before_return)
                 return ret
             else:
                 time.sleep(self.job_poll_interval)
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -86,7 +86,7 @@ def provide_bucket_name(func: Callable) -> Callable:
         async def maybe_add_bucket_name(*args, **kwargs):
             bound_args = function_signature.bind(*args, **kwargs)
 
-            if not bound_args.arguments.get("bucket_name"):
+            if "bucket_name" not in bound_args.arguments:
                 self = args[0]
                 if self.aws_conn_id:
                     connection = await sync_to_async(self.get_connection)(self.aws_conn_id)
@@ -116,7 +116,7 @@ def provide_bucket_name(func: Callable) -> Callable:
     def wrapper(*args, **kwargs) -> Callable:
         bound_args = function_signature.bind(*args, **kwargs)
 
-        if not bound_args.arguments.get("bucket_name"):
+        if "bucket_name" not in bound_args.arguments:
            self = args[0]
 
            if "bucket_name" in self.service_config:
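Note: the two guards differ when a caller passes bucket_name explicitly as
None. The rc1 test, "bucket_name" not in bound_args.arguments, skips the
connection-based fallback as soon as the argument is supplied at all; the
9.1.0 side (reconstructed above from a truncated line as
not bound_args.arguments.get("bucket_name")) also applies the fallback for an
explicit None or empty string. A standalone sketch with a hypothetical method:

    import inspect

    def download_file(bucket_name: str | None = None, key: str = "") -> None:
        """Hypothetical stand-in for a decorated S3Hook method."""

    sig = inspect.signature(download_file)
    bound = sig.bind(bucket_name=None, key="data.csv")

    print("bucket_name" not in bound.arguments)    # False: argument was supplied
    print(not bound.arguments.get("bucket_name"))  # True: None still triggers the fallback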
--- a/airflow/providers/amazon/aws/operators/appflow.py
+++ b/airflow/providers/amazon/aws/operators/appflow.py
@@ -21,11 +21,11 @@ from datetime import datetime, timedelta
 from typing import TYPE_CHECKING, cast
 
 from airflow.exceptions import AirflowException
+from airflow.operators.python import ShortCircuitOperator
 from airflow.providers.amazon.aws.hooks.appflow import AppflowHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.utils import datetime_to_epoch_ms
 from airflow.providers.amazon.aws.utils.mixins import AwsBaseHookMixin, AwsHookParams, aws_template_fields
-from airflow.providers.common.compat.standard.operators import ShortCircuitOperator
 
 if TYPE_CHECKING:
     from mypy_boto3_appflow.type_defs import (
--- a/airflow/providers/amazon/aws/operators/athena.py
+++ b/airflow/providers/amazon/aws/operators/athena.py
@@ -311,9 +311,7 @@ class AthenaOperator(AwsBaseOperator[AthenaHook]):
             }
             fields = [
                 SchemaDatasetFacetFields(
-                    name=column["Name"],
-                    type=column["Type"],
-                    description=column.get("Comment"),
+                    name=column["Name"], type=column["Type"], description=column["Comment"]
                 )
                 for column in table_metadata["TableMetadata"]["Columns"]
             ]
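Note: beyond collapsing the keyword arguments onto one line, 9.1.0 switches to
column.get("Comment"), which returns None for Glue columns that carry no
comment, whereas rc1's column["Comment"] raises KeyError and would abort
building the OpenLineage schema facet for such tables. Illustrated on a bare
dict:

    column = {"Name": "id", "Type": "bigint"}  # column metadata without a Comment

    print(column.get("Comment"))  # None
    try:
        column["Comment"]
    except KeyError as err:
        print("KeyError:", err)  # KeyError: 'Comment'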
--- a/airflow/providers/amazon/aws/operators/comprehend.py
+++ b/airflow/providers/amazon/aws/operators/comprehend.py
@@ -17,7 +17,7 @@
 from __future__ import annotations
 
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, ClassVar, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
@@ -55,7 +55,7 @@ class ComprehendBaseOperator(AwsBaseOperator[ComprehendHook]):
         "input_data_config", "output_data_config", "data_access_role_arn", "language_code"
     )
 
-    template_fields_renderers: ClassVar[dict] = {"input_data_config": "json", "output_data_config": "json"}
+    template_fields_renderers: dict = {"input_data_config": "json", "output_data_config": "json"}
 
     def __init__(
         self,
@@ -248,7 +248,7 @@ class ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]
         "document_classifier_kwargs",
     )
 
-    template_fields_renderers: ClassVar[dict] = {
+    template_fields_renderers: dict = {
         "input_data_config": "json",
         "output_data_config": "json",
         "document_classifier_kwargs": "json",
--- a/airflow/providers/amazon/aws/operators/dms.py
+++ b/airflow/providers/amazon/aws/operators/dms.py
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, ClassVar, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
@@ -64,7 +64,7 @@ class DmsCreateTaskOperator(AwsBaseOperator[DmsHook]):
         "migration_type",
         "create_task_kwargs",
     )
-    template_fields_renderers: ClassVar[dict] = {
+    template_fields_renderers = {
         "table_mappings": "json",
         "create_task_kwargs": "json",
     }
@@ -173,7 +173,7 @@ class DmsDescribeTasksOperator(AwsBaseOperator[DmsHook]):
 
     aws_hook_class = DmsHook
     template_fields: Sequence[str] = aws_template_fields("describe_tasks_kwargs")
-    template_fields_renderers: ClassVar[dict[str, str]] = {"describe_tasks_kwargs": "json"}
+    template_fields_renderers: dict[str, str] = {"describe_tasks_kwargs": "json"}
 
     def __init__(self, *, describe_tasks_kwargs: dict | None = None, **kwargs):
         super().__init__(**kwargs)
--- a/airflow/providers/amazon/aws/operators/ecs.py
+++ b/airflow/providers/amazon/aws/operators/ecs.py
@@ -368,7 +368,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
         If None, this is the same as the `region` parameter. If that is also None,
         this is the default AWS region based on your connection settings.
     :param awslogs_stream_prefix: the stream prefix that is used for the CloudWatch logs.
-        This
+        This is usually based on some custom name combined with the name of the container.
         Only required if you want logs to be shown in the Airflow UI after your job has
         finished.
     :param awslogs_fetch_interval: the interval that the ECS task log fetcher should wait
@@ -481,7 +481,6 @@ class EcsRunTaskOperator(EcsBaseOperator):
             self.awslogs_region = self.region_name
 
         self.arn: str | None = None
-        self.container_name: str | None = None
         self._started_by: str | None = None
 
         self.retry_args = quota_retry
@@ -598,10 +597,10 @@ class EcsRunTaskOperator(EcsBaseOperator):
 
         if self.capacity_provider_strategy:
             run_opts["capacityProviderStrategy"] = self.capacity_provider_strategy
+            if self.volume_configurations is not None:
+                run_opts["volumeConfigurations"] = self.volume_configurations
         elif self.launch_type:
             run_opts["launchType"] = self.launch_type
-        if self.volume_configurations is not None:
-            run_opts["volumeConfigurations"] = self.volume_configurations
         if self.platform_version is not None:
             run_opts["platformVersion"] = self.platform_version
         if self.group is not None:
@@ -625,7 +624,6 @@ class EcsRunTaskOperator(EcsBaseOperator):
         self.log.info("ECS Task started: %s", response)
 
         self.arn = response["tasks"][0]["taskArn"]
-        self.container_name = response["tasks"][0]["containers"][0]["name"]
         self.log.info("ECS task ID is: %s", self._get_ecs_task_id(self.arn))
 
     def _try_reattach_task(self, started_by: str):
@@ -661,12 +659,6 @@ class EcsRunTaskOperator(EcsBaseOperator):
         return self.awslogs_group and self.awslogs_stream_prefix
 
     def _get_logs_stream_name(self) -> str:
-        if (
-            self.awslogs_stream_prefix
-            and self.container_name
-            and not self.awslogs_stream_prefix.endswith(f"/{self.container_name}")
-        ):
-            return f"{self.awslogs_stream_prefix}/{self.container_name}/{self._get_ecs_task_id(self.arn)}"
         return f"{self.awslogs_stream_prefix}/{self._get_ecs_task_id(self.arn)}"
 
     def _get_task_log_fetcher(self) -> AwsTaskLogFetcher:
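Note: the ECS hunks all serve one feature. 9.1.0 captures the first container
name from the RunTask response and inserts it into the CloudWatch stream name
unless the prefix already ends with it; rc1 always reads <prefix>/<task-id>.
The awslogs driver names streams prefix-name/container-name/ecs-task-id, so
the rc1 form only works when the container name is baked into the prefix. A
sketch of the 9.1.0 resolution logic in isolation, with hypothetical values:

    def resolve_stream_name(prefix: str, container: str | None, task_id: str) -> str:
        # insert the container name unless the user already appended it
        if prefix and container and not prefix.endswith(f"/{container}"):
            return f"{prefix}/{container}/{task_id}"
        return f"{prefix}/{task_id}"

    print(resolve_stream_name("airflow", "app", "1a2b3c"))      # airflow/app/1a2b3c
    print(resolve_stream_name("airflow/app", "app", "1a2b3c"))  # airflow/app/1a2b3c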
--- a/airflow/providers/amazon/aws/operators/eks.py
+++ b/airflow/providers/amazon/aws/operators/eks.py
@@ -45,10 +45,8 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction
 try:
     from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
 except ImportError:
-    # preserve backward compatibility for older versions of cncf.kubernetes provider
-    from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import (
-        KubernetesPodOperator,
-    )
+    # preserve backward compatibility for older versions of cncf.kubernetes provider
+    from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
--- a/airflow/providers/amazon/aws/operators/glue.py
+++ b/airflow/providers/amazon/aws/operators/glue.py
@@ -74,11 +74,6 @@ class GlueJobOperator(BaseOperator):
     :param update_config: If True, Operator will update job configuration. (default: False)
     :param replace_script_file: If True, the script file will be replaced in S3. (default: False)
     :param stop_job_run_on_kill: If True, Operator will stop the job run when task is killed.
-    :param sleep_before_return: time in seconds to wait before returning final status. This is meaningful in case
-        of limiting concurrency, Glue needs 5-10 seconds to clean up resources.
-        Thus if status is returned immediately it might end up in case of more than 1 concurrent run.
-        It is recommended to set this parameter to 10 when you are using concurrency=1.
-        For more information see: https://repost.aws/questions/QUaKgpLBMPSGWO0iq2Fob_bw/glue-run-concurrent-jobs#ANFpCL2fRnQRqgDFuIU_rpvA
     """
 
     template_fields: Sequence[str] = (
@@ -123,7 +118,6 @@ class GlueJobOperator(BaseOperator):
         update_config: bool = False,
         job_poll_interval: int | float = 6,
         stop_job_run_on_kill: bool = False,
-        sleep_before_return: int = 0,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -151,7 +145,6 @@ class GlueJobOperator(BaseOperator):
         self.job_poll_interval = job_poll_interval
         self.stop_job_run_on_kill = stop_job_run_on_kill
         self._job_run_id: str | None = None
-        self.sleep_before_return: int = sleep_before_return
 
     @cached_property
     def glue_job_hook(self) -> GlueJobHook:
@@ -227,9 +220,7 @@ class GlueJobOperator(BaseOperator):
                 method_name="execute_complete",
             )
         elif self.wait_for_completion:
-            glue_job_run = self.glue_job_hook.job_completion(
-                self.job_name, self._job_run_id, self.verbose, self.sleep_before_return
-            )
+            glue_job_run = self.glue_job_hook.job_completion(self.job_name, self._job_run_id, self.verbose)
             self.log.info(
                 "AWS Glue Job: %s status: %s. Run Id: %s",
                 self.job_name,
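Note: sleep_before_return exists only on the 9.1.0 side. Per the removed
docstring, it holds the terminal status back for a few seconds so Glue can
release the run slot; returning immediately can trip
ConcurrentRunsExceededException for jobs capped at one concurrent run. A
hedged usage sketch against the 9.1.0 API (DAG wiring omitted; names and
paths are hypothetical):

    from airflow.providers.amazon.aws.operators.glue import GlueJobOperator

    submit_job = GlueJobOperator(
        task_id="submit_glue_job",
        job_name="my_glue_job",                   # hypothetical job name
        script_location="s3://my-bucket/etl.py",  # hypothetical script path
        wait_for_completion=True,
        sleep_before_return=10,  # value the removed docstring recommends for concurrency=1
    )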
--- a/airflow/providers/amazon/aws/operators/kinesis_analytics.py
+++ b/airflow/providers/amazon/aws/operators/kinesis_analytics.py
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, ClassVar, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
 
 from botocore.exceptions import ClientError
 
@@ -70,7 +70,7 @@ class KinesisAnalyticsV2CreateApplicationOperator(AwsBaseOperator[KinesisAnalyti
         "create_application_kwargs",
         "application_description",
     )
-    template_fields_renderers: ClassVar[dict] = {
+    template_fields_renderers: dict = {
         "create_application_kwargs": "json",
     }
 
@@ -149,7 +149,7 @@ class KinesisAnalyticsV2StartApplicationOperator(AwsBaseOperator[KinesisAnalytic
         "application_name",
         "run_configuration",
     )
-    template_fields_renderers: ClassVar[dict] = {
+    template_fields_renderers: dict = {
         "run_configuration": "json",
     }
 
--- a/airflow/providers/amazon/aws/operators/sagemaker.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker.py
@@ -20,7 +20,7 @@ import datetime
 import json
 import time
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Sequence
+from typing import TYPE_CHECKING, Any, Callable, Sequence
 
 from botocore.exceptions import ClientError
 
@@ -65,7 +65,7 @@ class SageMakerBaseOperator(BaseOperator):
 
     template_fields: Sequence[str] = ("config",)
     template_ext: Sequence[str] = ()
-    template_fields_renderers: ClassVar[dict] = {"config": "json"}
+    template_fields_renderers: dict = {"config": "json"}
     ui_color: str = "#ededed"
     integer_fields: list[list[Any]] = []
 
--- a/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py
+++ b/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py
@@ -240,7 +240,7 @@ class S3ToDynamoDBOperator(BaseOperator):
         finally:
             self.log.info("Delete tmp DynamoDB table %s", self.tmp_table_name)
             client.delete_table(TableName=self.tmp_table_name)
-        return dynamodb_hook.get_conn().Table(self.dynamodb_table_name).table_arn
+            return dynamodb_hook.get_conn().Table(self.dynamodb_table_name).table_arn
 
     def execute(self, context: Context) -> str:
         """
--- a/airflow/providers/amazon/aws/triggers/athena.py
+++ b/airflow/providers/amazon/aws/triggers/athena.py
@@ -29,7 +29,8 @@ class AthenaTrigger(AwsBaseWaiterTrigger):
     """
     Trigger for AthenaOperator.
 
-    The trigger will asynchronously poll the boto3 API
+    The trigger will asynchronously poll the boto3 API and wait for the
+    Redshift cluster to be in the `available` state.
 
     :param query_execution_id: ID of the Athena query execution to watch
     :param waiter_delay: The amount of time in seconds to wait between attempts.
--- a/airflow/providers/amazon/get_provider_info.py
+++ b/airflow/providers/amazon/get_provider_info.py
@@ -100,7 +100,7 @@ def get_provider_info():
         "dependencies": [
             "apache-airflow>=2.8.0",
             "apache-airflow-providers-common-compat>=1.2.1",
-            "apache-airflow-providers-common-sql>=1.20.0",
+            "apache-airflow-providers-common-sql>=1.3.1",
             "apache-airflow-providers-http",
             "boto3>=1.34.90",
             "botocore>=1.34.90",
@@ -130,7 +130,7 @@ def get_provider_info():
             "aiobotocore>=2.13.0",
             "aws_xray_sdk>=2.12.0",
             "moto[cloudformation,glue]>=5.0.0",
-            "mypy-boto3-appflow>=1.34.0,<1.35.39",
+            "mypy-boto3-appflow>=1.34.0",
             "mypy-boto3-rds>=1.34.90",
             "mypy-boto3-redshift-data>=1.34.0",
             "mypy-boto3-s3>=1.34.90",
--- a/apache_airflow_providers_amazon-9.1.0.dist-info/METADATA
+++ b/apache_airflow_providers_amazon-9.1.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 9.1.0
+Version: 9.1.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,10 +21,10 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
-Requires-Dist: apache-airflow-providers-common-compat>=1.2.1
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.2.1rc0
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.8.0
+Requires-Dist: apache-airflow>=2.8.0rc0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
@@ -35,16 +35,16 @@ Requires-Dist: python3-saml>=1.16.0
 Requires-Dist: redshift_connector>=2.0.918
 Requires-Dist: watchtower>=3.0.0,!=3.3.0,<4
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
-Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
-Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
-Requires-Dist: apache-airflow-providers-common-sql ; extra == "common-sql"
+Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
+Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
+Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
 Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-http ; extra == "http"
 Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
-Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
+Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
 Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas" and (python_version>="3.9")
@@ -61,16 +61,16 @@ Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: aiobotocore
-Provides-Extra: apache-hive
-Provides-Extra: cncf-kubernetes
-Provides-Extra: common-compat
-Provides-Extra: common-sql
+Provides-Extra: apache.hive
+Provides-Extra: cncf.kubernetes
+Provides-Extra: common.compat
+Provides-Extra: common.sql
 Provides-Extra: exasol
 Provides-Extra: ftp
 Provides-Extra: google
 Provides-Extra: http
 Provides-Extra: imap
-Provides-Extra: microsoft-azure
+Provides-Extra: microsoft.azure
 Provides-Extra: mongo
 Provides-Extra: openlineage
 Provides-Extra: pandas
@@ -123,7 +123,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.1.0``
+Release: ``9.1.0.rc1``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -155,7 +155,7 @@ PIP package Version required
 ========================================== ======================
 ``apache-airflow``                         ``>=2.8.0``
 ``apache-airflow-providers-common-compat`` ``>=1.2.1``
-``apache-airflow-providers-common-sql``    ``>=1.20.0``
+``apache-airflow-providers-common-sql``    ``>=1.3.1``
 ``apache-airflow-providers-http``
 ``boto3``                                  ``>=1.34.90``
 ``botocore``                               ``>=1.34.90``
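Note: the extras in this METADATA diverge only in spelling. rc1 publishes
dotted extras (apache.hive), while the 9.1.0 wheel carries a newer
Metadata-Version, which requires PEP 685 normalized extra names: runs of ".",
"-", and "_" collapse to a single "-". Installers normalize both spellings to
the same extra, as the packaging library shows:

    from packaging.utils import canonicalize_name

    # PEP 685: extras are normalized like project names before comparison
    for extra in ("apache.hive", "cncf.kubernetes", "microsoft.azure"):
        print(extra, "->", canonicalize_name(extra))
    # apache.hive -> apache-hive
    # cncf.kubernetes -> cncf-kubernetes
    # microsoft.azure -> microsoft-azure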
--- a/apache_airflow_providers_amazon-9.1.0.dist-info/RECORD
+++ b/apache_airflow_providers_amazon-9.1.0rc1.dist-info/RECORD
@@ -1,12 +1,12 @@
 airflow/providers/amazon/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
 airflow/providers/amazon/__init__.py,sha256=MuTYiz17zqZxTSbZ537dOIWjnfLeKguUmhr0_326bro,1493
-airflow/providers/amazon/get_provider_info.py,sha256=
+airflow/providers/amazon/get_provider_info.py,sha256=JnyQK8t0-7kcB2ed5tBqkxh0PtzYoFpe1yYseO_dU6I,68960
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/assets/s3.py,sha256=wNaJiOM90-SCauD4EQneZVXMO54yDRjLPfI8D5o0-fw,1861
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=
+airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=0E38_x_EjV6uwMqkqe0vfmLGGCpcQKEAvnr7FPQwXTY,16625
 airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=SoiiA3sVB1-G02qhQDSTst_25MjW4xbSE0vVDxwR-uw,1882
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -38,8 +38,8 @@ airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py,sha256
 airflow/providers/amazon/aws/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/fs/s3.py,sha256=Ty9XT9c1XArkUYcQkalvNZhuoTlEg3uKy-AIzNW9LgY,4797
 airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/hooks/appflow.py,sha256=
-airflow/providers/amazon/aws/hooks/athena.py,sha256=
+airflow/providers/amazon/aws/hooks/appflow.py,sha256=v7o6BgFDm8McE3JtB0oMkc80_nTP0e_u_uEDWFa0sVI,5367
+airflow/providers/amazon/aws/hooks/athena.py,sha256=tnck2Ts9QFVDU5MsY4_9mTULyMO9QHyMm_KTJPtCsk0,13001
 airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=vFIUbMMTem3xvYAUTvW3h1ypjpKVLNck3VbrAlupVLA,6844
 airflow/providers/amazon/aws/hooks/base_aws.py,sha256=LDxXMKjlYFEhsZ0u0DsIjf7qt-Wr6aRn9NpfoNJc0nc,43630
 airflow/providers/amazon/aws/hooks/batch_client.py,sha256=w4pIsSfHyGz2rRH95cIxCHocqXwxjpYVDT9Tf3sqUso,21669
@@ -55,12 +55,12 @@ airflow/providers/amazon/aws/hooks/dynamodb.py,sha256=14HDVrIeeK6TlLI767qsgGw1mu
 airflow/providers/amazon/aws/hooks/ec2.py,sha256=DP4RjJoO2P8I5qT4FqTUFy0GQ4-SrJ354ET4YUVDfXE,8088
 airflow/providers/amazon/aws/hooks/ecr.py,sha256=k89qfKwGZDMRa39TUtSjlf0wJag0SEbIIe1WgDW-e7I,3823
 airflow/providers/amazon/aws/hooks/ecs.py,sha256=wr4W0P2wlPj1kd31X79uYtI7BqDVk6RW1IWyR2IvIeU,6720
-airflow/providers/amazon/aws/hooks/eks.py,sha256=
+airflow/providers/amazon/aws/hooks/eks.py,sha256=rj15KoRbcYicLUO4SOFq4-G-T_iLh0WzRkVtZ8wVM6M,24440
 airflow/providers/amazon/aws/hooks/elasticache_replication_group.py,sha256=x6kkaR2nzDF8w1kqolbaS3-XCbHl5qlJMcpGYmlsxuU,12089
 airflow/providers/amazon/aws/hooks/emr.py,sha256=7x2YCGKr5nylRfbBitqRXmvW0RwYgARXv4pXXeu_WJ0,21430
 airflow/providers/amazon/aws/hooks/eventbridge.py,sha256=dSaKbFB8ueOUJGl6YLIz70zXy0Xzr3yMflKS2wGFDSM,3364
 airflow/providers/amazon/aws/hooks/glacier.py,sha256=Ia4xE8D1hGnUWNs8CqNhDAsqSJiqY4HliE2-knrMHrw,3495
-airflow/providers/amazon/aws/hooks/glue.py,sha256=
+airflow/providers/amazon/aws/hooks/glue.py,sha256=bGwKpY0UY3YdI8H7cJ5gpsnj2rnWTY7EeFW-JuJkxO8,22200
 airflow/providers/amazon/aws/hooks/glue_catalog.py,sha256=XQu9v_b37TXO7F_V3u7WuLS7UuCigm4UEy2tuzF8ZiY,9213
 airflow/providers/amazon/aws/hooks/glue_crawler.py,sha256=C9O2YG63BiNS6UvvB1Mn1aHWdRYzDBf2a5brimLU9IQ,7926
 airflow/providers/amazon/aws/hooks/glue_databrew.py,sha256=96duZVYtLDQgfJ02XUdov-QWPoG2Wp0O0RFuwB-6nkU,2580
@@ -75,7 +75,7 @@ airflow/providers/amazon/aws/hooks/rds.py,sha256=h7NF3GZ42RKeh70rlg2BQFVpa8vNadS
 airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=ywJxbcOy91-oGGkApo6_nRKucyaEhxXqEXikG7q9uZ4,7977
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=5Kz7tsTbDwDDAqTud1--vyi74IksfHWBouIPRXYYFJk,11812
 airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=UTH2cyCfdGp5o2l94le5HLCF82KpsoGgRAgd7jrmeVw,11301
-airflow/providers/amazon/aws/hooks/s3.py,sha256=
+airflow/providers/amazon/aws/hooks/s3.py,sha256=ChlYVLix_fQcZMl1-rpOx7ZxOUaxFGfGxhMPAiA-_lQ,61506
 airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=w3_zqbuNznP47TBLioHuBenNv82blDul-rppyZMRaKg,60461
 airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
 airflow/providers/amazon/aws/hooks/ses.py,sha256=uOTjyhb87jNyf2B11zH1wg5Oomnsx0nM4aHteP-mCHs,4147
@@ -101,25 +101,25 @@ airflow/providers/amazon/aws/notifications/chime.py,sha256=QCEdvVO7oCIbf_rGHtQZe
 airflow/providers/amazon/aws/notifications/sns.py,sha256=VCh3MpKl86RBBmI2zw0eyHG7Q8DxdL9ug8zaiC3YZyI,3101
 airflow/providers/amazon/aws/notifications/sqs.py,sha256=ixxkxonDvSk_x3BRAlFAQe771tGlwFli2JzVx-2f8gg,3591
 airflow/providers/amazon/aws/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/operators/appflow.py,sha256=
-airflow/providers/amazon/aws/operators/athena.py,sha256=
+airflow/providers/amazon/aws/operators/appflow.py,sha256=WnqziF7-TP8SVG3ORCK0_TKOxAbe3xupxG-0Dg_kQEs,20830
+airflow/providers/amazon/aws/operators/athena.py,sha256=n18FbhkOGSHPHBXqcK5z9xEGA26bfKHr8tC4cu20JKY,14657
 airflow/providers/amazon/aws/operators/base_aws.py,sha256=cdc5GZkl_YGDDtlV9CVsdbTH3j7bza6d3RrDm93seOo,3864
 airflow/providers/amazon/aws/operators/batch.py,sha256=4H98PlZTx_pgINBoeifpBJw_dIEQb_KMSUVkvEP0y0w,21981
 airflow/providers/amazon/aws/operators/bedrock.py,sha256=PJcsRoTht4w23XG8W1B4Rl6BEJiomQLnwTcuPfMM3pI,40140
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=-WMYq-oA8WpPN2i5aTgBenFj9-CjbeEcy9NuRCnSwpM,5066
-airflow/providers/amazon/aws/operators/comprehend.py,sha256=
+airflow/providers/amazon/aws/operators/comprehend.py,sha256=JL0UfGpAekOeRFx3IT32u3fWhMhCwTyziA_OWB6xgjk,15954
 airflow/providers/amazon/aws/operators/datasync.py,sha256=7Kt9POmcqJXF_1ZVRLVnJV6prBwYcUcCkfFJMPtraPk,18636
-airflow/providers/amazon/aws/operators/dms.py,sha256=
+airflow/providers/amazon/aws/operators/dms.py,sha256=6RhUtbELAjp0LLkUWl73kdcH4MRmyTzwHi1NxOlkE0Q,12313
 airflow/providers/amazon/aws/operators/ec2.py,sha256=aQj6cL3nZzu0tcn3dq6RBSPsByZe8fNtn6qcpQYtlNI,17051
-airflow/providers/amazon/aws/operators/ecs.py,sha256=
-airflow/providers/amazon/aws/operators/eks.py,sha256=
+airflow/providers/amazon/aws/operators/ecs.py,sha256=Zem_ooknDf3LhjP7MBxXj0MI46DXsWV7MLvhTeUjNFY,32318
+airflow/providers/amazon/aws/operators/eks.py,sha256=BAuEvi05YiqmT3XanZMWWFqKdUh45mpO7JQ4zk8h5cE,48925
 airflow/providers/amazon/aws/operators/emr.py,sha256=s5M6QI90zwUobVUQzJ0UdNY8O_jKkwzu-iWA7zByBLM,71937
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=e686XFhVi54DbaCk7oVc0fhvH6GIPU3p8jgyCie1yBU,10394
 airflow/providers/amazon/aws/operators/glacier.py,sha256=zxwC6lLk6sWerjlogXq6HgNOJx4h0hkqpGpqn23hJWk,3654
-airflow/providers/amazon/aws/operators/glue.py,sha256=
+airflow/providers/amazon/aws/operators/glue.py,sha256=m8hdF6eTyzsK3onOqt6Td0dGshhgf_XU1f4EtMb42LU,28390
 airflow/providers/amazon/aws/operators/glue_crawler.py,sha256=6646Ru_DrGjcv_hCy5EjPXcFY6pdB0bjj6ko8Wj3XDk,5253
 airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=IU9S4gjy8TtwfLxZWhslbHrl8Fpw72QDmL_In8MMecw,6092
-airflow/providers/amazon/aws/operators/kinesis_analytics.py,sha256=
+airflow/providers/amazon/aws/operators/kinesis_analytics.py,sha256=Phjx24ESi2QIszD1O6OTCV_R7Wkr04qjUsASuLzCOoM,15773
 airflow/providers/amazon/aws/operators/lambda_function.py,sha256=96KtK5KUpMPW2i8Xay1UdKPMX211hS6FqweFnRNuTFQ,10619
 airflow/providers/amazon/aws/operators/neptune.py,sha256=on5oNX5K4yHfW1POE0eeZujta71vkJdVL07vucGjX-4,14751
 airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswRzaHaK8h0ACLY99i_1Prtq10,4089
@@ -127,7 +127,7 @@ airflow/providers/amazon/aws/operators/rds.py,sha256=U2YLPx5MZCdDrLIyy-9K93W5aUt
 airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=rmBHCssxrYEJ8EnENY-AnzC004lbtHvxXHpy69sHtV0,36681
 airflow/providers/amazon/aws/operators/redshift_data.py,sha256=36MVojiezDyGZ_4aQuY8xvs9doQlz_SWpJEp6Kwkw0U,10832
 airflow/providers/amazon/aws/operators/s3.py,sha256=d_K2DDNXEXkoi-WZ02-bwCf244Ogiw1PBaHcbsX-8Sg,36272
-airflow/providers/amazon/aws/operators/sagemaker.py,sha256=
+airflow/providers/amazon/aws/operators/sagemaker.py,sha256=nyGS6uLP3eUYPCwOXDhdlucSGvI2lrSV8PUJ1_1f_5w,82337
 airflow/providers/amazon/aws/operators/sns.py,sha256=Rttd015UhLo4pCplGybxtLhflyu_26IFzYP7WTmQFk8,3730
 airflow/providers/amazon/aws/operators/sqs.py,sha256=0KkhhIblMggNHLxAyrv5dbWcaXvdSWQA2AOQP2CzOlo,4327
 airflow/providers/amazon/aws/operators/step_function.py,sha256=eXZAxZqG5VNPaFVEchyL4vKmOh54jc83ZjrIZDeld34,9515
@@ -176,7 +176,7 @@ airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py,sha256=xYJ94xNDs
 airflow/providers/amazon/aws/transfers/local_to_s3.py,sha256=yp9m7aZuL6YgzYRsFcyZ1wcGTXZTMO0F0CuBfkH1eGo,4165
 airflow/providers/amazon/aws/transfers/mongo_to_s3.py,sha256=OU7Cge_0WQd7xEb38V-0hjSHbjZRCQ7Ay4xntcG9R28,6020
 airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=-XMmYoHmQiZqZ5rqdYp4ZZbUNUe-Vu1z8TrkH9pxNHA,13001
-airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=
+airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=BRYID2nCmjooVH4WVD7tcy5MnkGQPK4c9BwioeCsIsw,11658
 airflow/providers/amazon/aws/transfers/s3_to_ftp.py,sha256=cxyNRW_FJQNNluuYr5fVluGLYnNRUvN75iHSSEHrVnY,2966
 airflow/providers/amazon/aws/transfers/s3_to_redshift.py,sha256=GXMJV-_7Vy0CvPBOgSGWw_L2xf25XqRF9ztDFdMpWLc,11209
 airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=bgHgKv7o8ueC_zkhzW5k2xZpFnMlBHMcDf0t4sQ7kHY,3488
@@ -186,7 +186,7 @@ airflow/providers/amazon/aws/transfers/sftp_to_s3.py,sha256=Z1D5y-Dhxgd4SxptlJpr
 airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=rEwuWF1BTZew32S-7ggCVuJQ00lyQ5EoR-Rc-OvpUhA,10591
 airflow/providers/amazon/aws/triggers/README.md,sha256=6m48KR4SRC0mMwVBGZ2g5HtkvgBUJ9y-3pK6_vIPQ_E,10843
 airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/amazon/aws/triggers/athena.py,sha256=
+airflow/providers/amazon/aws/triggers/athena.py,sha256=TAn2snAGHPfRT-pG8r0EMKlTSgwKr4APsJMqAE8pORI,2666
 airflow/providers/amazon/aws/triggers/base.py,sha256=QT-omauJw_ksUb6gS2erm_FVXSZdwHIpyznFXTkoMXo,6384
 airflow/providers/amazon/aws/triggers/batch.py,sha256=GogZnPaSc1ms55_aNcSDtV4wIZL3kMCSCY7dqpGJH4o,4257
 airflow/providers/amazon/aws/triggers/bedrock.py,sha256=IiKyl0UUax-ex4siLjZpQGDZWyAOOhvJ-9USyRi_r3c,7260
@@ -195,7 +195,7 @@ airflow/providers/amazon/aws/triggers/ec2.py,sha256=gMY3EP4TmL6SodLw12FNSLttlHd7
 airflow/providers/amazon/aws/triggers/ecs.py,sha256=YXFXUpQ1ejvtMIwkiet4LTLdJSkG8nr_ZEUI5lpqRGA,9222
 airflow/providers/amazon/aws/triggers/eks.py,sha256=9elEgIErRQRndk4sgPZ2F9bjcaWsUoUG18Qv758ly1U,16151
 airflow/providers/amazon/aws/triggers/emr.py,sha256=og83L9BPUcqqVXHwfUTD9cA-276YDo3Fnc0e2svQfqE,16566
-airflow/providers/amazon/aws/triggers/glue.py,sha256=
+airflow/providers/amazon/aws/triggers/glue.py,sha256=hv_nLzBRPG13MetjEfU_-KuTphLE-xyF6yW4uQJQuBc,9480
 airflow/providers/amazon/aws/triggers/glue_crawler.py,sha256=W6EYAizQQtVbH5SiZmof1GDKKHKAB3q3GRmQkCkiy1o,2372
 airflow/providers/amazon/aws/triggers/glue_databrew.py,sha256=SWbsgUrEfPN2Efk3Jfu8mlCAlUJRCHp8PnnqI4YbUR8,2593
 airflow/providers/amazon/aws/triggers/kinesis_analytics.py,sha256=FERA9pE2o4juRJZVlEauDcJcPkhlQ6K9Q6RHt2MZlcE,2937
@@ -249,7 +249,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
-apache_airflow_providers_amazon-9.1.0.dist-info/entry_points.txt,sha256=
-apache_airflow_providers_amazon-9.1.0.dist-info/WHEEL,sha256=
-apache_airflow_providers_amazon-9.1.0.dist-info/METADATA,sha256=
-apache_airflow_providers_amazon-9.1.0.dist-info/RECORD,,
+apache_airflow_providers_amazon-9.1.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-9.1.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-9.1.0rc1.dist-info/METADATA,sha256=qBjKdJd9WgtPT5L-V5k2D8IBorkkQxcQwRTKdnEXuEg,10687
+apache_airflow_providers_amazon-9.1.0rc1.dist-info/RECORD,,