apache-airflow-providers-amazon 8.26.0rc1__py3-none-any.whl → 8.26.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +10 -0
- airflow/providers/amazon/aws/executors/batch/batch_executor.py +19 -16
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +22 -15
- airflow/providers/amazon/aws/operators/s3.py +11 -1
- airflow/providers/amazon/aws/operators/sagemaker.py +6 -9
- airflow/providers/amazon/aws/sensors/s3.py +11 -5
- {apache_airflow_providers_amazon-8.26.0rc1.dist-info → apache_airflow_providers_amazon-8.26.0rc2.dist-info}/METADATA +2 -2
- {apache_airflow_providers_amazon-8.26.0rc1.dist-info → apache_airflow_providers_amazon-8.26.0rc2.dist-info}/RECORD +10 -10
- {apache_airflow_providers_amazon-8.26.0rc1.dist-info → apache_airflow_providers_amazon-8.26.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.26.0rc1.dist-info → apache_airflow_providers_amazon-8.26.0rc2.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -81,6 +81,16 @@ class AwsAuthManager(BaseAuthManager):
     """
 
     def __init__(self, appbuilder: AirflowAppBuilder) -> None:
+        from packaging.version import Version
+
+        from airflow.version import version
+
+        # TODO: remove this if block when min_airflow_version is set to higher than 2.9.0
+        if Version(version) < Version("2.9"):
+            raise AirflowOptionalProviderFeatureException(
+                "``AwsAuthManager`` is compatible with Airflow versions >= 2.9."
+            )
+
         super().__init__(appbuilder)
         self._check_avp_schema_version()
 
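The guard above uses ``packaging.version`` to refuse to start on Airflow releases older than 2.9. A minimal, illustrative check of the cutoff it enforces (the version strings below are arbitrary examples)::

    from packaging.version import Version

    # Illustrative only: the rc2 guard rejects any Airflow release older than 2.9.
    for candidate in ("2.8.4", "2.9.0", "2.10.1"):
        too_old = Version(candidate) < Version("2.9")
        print(candidate, "rejected" if too_old else "accepted")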
airflow/providers/amazon/aws/executors/batch/batch_executor.py
@@ -20,8 +20,9 @@
 from __future__ import annotations
 
 import contextlib
+import logging
 import time
-from collections import
+from collections import deque
 from copy import deepcopy
 from typing import TYPE_CHECKING, Any, Dict, List, Sequence
 
@@ -264,7 +265,6 @@ class AwsBatchExecutor(BaseExecutor):
         in the next iteration of the sync() method, unless it has exceeded the maximum number of
         attempts. If a job exceeds the maximum number of attempts, it is removed from the queue.
         """
-        failure_reasons = defaultdict(int)
         for _ in range(len(self.pending_jobs)):
             batch_job = self.pending_jobs.popleft()
             key = batch_job.key
@@ -272,7 +272,7 @@ class AwsBatchExecutor(BaseExecutor):
             queue = batch_job.queue
             exec_config = batch_job.executor_config
             attempt_number = batch_job.attempt_number
-
+            failure_reason: str | None = None
             if timezone.utcnow() < batch_job.next_attempt_time:
                 self.pending_jobs.append(batch_job)
                 continue
@@ -286,18 +286,18 @@ class AwsBatchExecutor(BaseExecutor):
                 if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
                     self.pending_jobs.append(batch_job)
                     raise
-
+                failure_reason = str(e)
             except Exception as e:
-
-
-            if _failure_reason:
-                for reason in _failure_reason:
-                    failure_reasons[reason] += 1
+                failure_reason = str(e)
 
+            if failure_reason:
                 if attempt_number >= int(self.__class__.MAX_SUBMIT_JOB_ATTEMPTS):
-                    self.
-
+                    self.send_message_to_task_logs(
+                        logging.ERROR,
+                        "This job has been unsuccessfully attempted too many times (%s). Dropping the task. Reason: %s",
                         attempt_number,
+                        failure_reason,
+                        ti=key,
                     )
                     self.fail(key=key)
                 else:
@@ -322,11 +322,6 @@ class AwsBatchExecutor(BaseExecutor):
                     # running_state is added in Airflow 2.10 and only needed to support task adoption
                     # (an optional executor feature).
                     self.running_state(key, job_id)
-        if failure_reasons:
-            self.log.error(
-                "Pending Batch jobs failed to launch for the following reasons: %s. Retrying later.",
-                dict(failure_reasons),
-            )
 
     def _describe_jobs(self, job_ids) -> list[BatchJob]:
         all_jobs = []
@@ -462,3 +457,11 @@ class AwsBatchExecutor(BaseExecutor):
 
         not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
         return not_adopted_tis
+
+    def send_message_to_task_logs(self, level: int, msg: str, *args, ti: TaskInstance | TaskInstanceKey):
+        # TODO: remove this method when min_airflow_version is set to higher than 2.10.0
+        try:
+            super().send_message_to_task_logs(level, msg, *args, ti=ti)
+        except AttributeError:
+            # ``send_message_to_task_logs`` is added in 2.10.0
+            self.log.error(msg, *args)
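The new ``send_message_to_task_logs`` override is a compatibility shim: it calls the ``BaseExecutor`` method where it exists (Airflow 2.10+) and otherwise falls back to the executor's own logger. A minimal, self-contained sketch of that fallback pattern; the class names below are stand-ins, not the provider's classes::

    import logging

    logging.basicConfig(level=logging.INFO)


    class OldBaseExecutor:
        """Stand-in for BaseExecutor on Airflow < 2.10, which has no send_message_to_task_logs."""

        log = logging.getLogger("executor")


    class CompatExecutor(OldBaseExecutor):
        def send_message_to_task_logs(self, level: int, msg: str, *args, ti=None):
            try:
                # On Airflow >= 2.10 the parent class provides this method.
                super().send_message_to_task_logs(level, msg, *args, ti=ti)
            except AttributeError:
                # Older Airflow: fall back to the executor-level logger.
                self.log.error(msg, *args)


    # The AttributeError branch runs here because OldBaseExecutor lacks the method.
    CompatExecutor().send_message_to_task_logs(logging.ERROR, "dropping task %s", "key-1")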
airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -23,6 +23,7 @@ Each Airflow task gets delegated out to an Amazon ECS Task.
 
 from __future__ import annotations
 
+import logging
 import time
 from collections import defaultdict, deque
 from copy import deepcopy
@@ -347,7 +348,7 @@ class AwsEcsExecutor(BaseExecutor):
             queue = ecs_task.queue
             exec_config = ecs_task.executor_config
             attempt_number = ecs_task.attempt_number
-
+            failure_reasons = []
             if timezone.utcnow() < ecs_task.next_attempt_time:
                 self.pending_tasks.append(ecs_task)
                 continue
@@ -361,23 +362,21 @@ class AwsEcsExecutor(BaseExecutor):
                 if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
                     self.pending_tasks.append(ecs_task)
                     raise
-
+                failure_reasons.append(str(e))
             except Exception as e:
                 # Failed to even get a response back from the Boto3 API or something else went
                 # wrong. For any possible failure we want to add the exception reasons to the
                 # failure list so that it is logged to the user and most importantly the task is
                 # added back to the pending list to be retried later.
-
+                failure_reasons.append(str(e))
             else:
                 # We got a response back, check if there were failures. If so, add them to the
                 # failures list so that it is logged to the user and most importantly the task
                 # is added back to the pending list to be retried later.
                 if run_task_response["failures"]:
-
+                    failure_reasons.extend([f["reason"] for f in run_task_response["failures"]])
 
-            if
-                for reason in _failure_reasons:
-                    failure_reasons[reason] += 1
+            if failure_reasons:
                 # Make sure the number of attempts does not exceed MAX_RUN_TASK_ATTEMPTS
                 if int(attempt_number) < int(self.__class__.MAX_RUN_TASK_ATTEMPTS):
                     ecs_task.attempt_number += 1
@@ -386,14 +385,19 @@ class AwsEcsExecutor(BaseExecutor):
                     )
                     self.pending_tasks.append(ecs_task)
                 else:
-                    self.
-
+                    self.send_message_to_task_logs(
+                        logging.ERROR,
+                        "ECS task %s has failed a maximum of %s times. Marking as failed. Reasons: %s",
                         task_key,
                         attempt_number,
+                        ", ".join(failure_reasons),
+                        ti=task_key,
                     )
                     self.fail(task_key)
             elif not run_task_response["tasks"]:
-                self.
+                self.send_message_to_task_logs(
+                    logging.ERROR, "ECS RunTask Response: %s", run_task_response, ti=task_key
+                )
                 raise EcsExecutorException(
                     "No failures and no ECS tasks provided in response. This should never happen."
                 )
@@ -407,11 +411,6 @@ class AwsEcsExecutor(BaseExecutor):
                 # executor feature).
                 # TODO: remove when min airflow version >= 2.9.2
                 pass
-        if failure_reasons:
-            self.log.error(
-                "Pending ECS tasks failed to launch for the following reasons: %s. Retrying later.",
-                dict(failure_reasons),
-            )
 
     def _run_task(
         self, task_id: TaskInstanceKey, cmd: CommandType, queue: str, exec_config: ExecutorConfigType
@@ -543,3 +542,11 @@ class AwsEcsExecutor(BaseExecutor):
 
         not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
         return not_adopted_tis
+
+    def send_message_to_task_logs(self, level: int, msg: str, *args, ti: TaskInstance | TaskInstanceKey):
+        # TODO: remove this method when min_airflow_version is set to higher than 2.10.0
+        try:
+            super().send_message_to_task_logs(level, msg, *args, ti=ti)
+        except AttributeError:
+            # ``send_message_to_task_logs`` is added in 2.10.0
+            self.log.error(msg, *args)
airflow/providers/amazon/aws/operators/s3.py
@@ -610,6 +610,7 @@ class S3FileTransformOperator(BaseOperator):
     :param dest_s3_key: The key to be written from S3. (templated)
     :param transform_script: location of the executable transformation script
     :param select_expression: S3 Select expression
+    :param select_expr_serialization_config: A dictionary that contains input and output serialization configurations for S3 Select.
     :param script_args: arguments for transformation script (templated)
     :param source_aws_conn_id: source s3 connection
     :param source_verify: Whether or not to verify SSL certificates for S3 connection.
@@ -641,6 +642,7 @@ class S3FileTransformOperator(BaseOperator):
         dest_s3_key: str,
         transform_script: str | None = None,
         select_expression=None,
+        select_expr_serialization_config: dict[str, dict[str, dict]] | None = None,
         script_args: Sequence[str] | None = None,
         source_aws_conn_id: str | None = "aws_default",
         source_verify: bool | str | None = None,
@@ -659,6 +661,7 @@ class S3FileTransformOperator(BaseOperator):
         self.replace = replace
         self.transform_script = transform_script
         self.select_expression = select_expression
+        self.select_expr_serialization_config = select_expr_serialization_config or {}
         self.script_args = script_args or []
         self.output_encoding = sys.getdefaultencoding()
 
@@ -678,7 +681,14 @@ class S3FileTransformOperator(BaseOperator):
             self.log.info("Dumping S3 file %s contents to local file %s", self.source_s3_key, f_source.name)
 
             if self.select_expression is not None:
-
+                input_serialization = self.select_expr_serialization_config.get("input_serialization")
+                output_serialization = self.select_expr_serialization_config.get("output_serialization")
+                content = source_s3.select_key(
+                    key=self.source_s3_key,
+                    expression=self.select_expression,
+                    input_serialization=input_serialization,
+                    output_serialization=output_serialization,
+                )
                 f_source.write(content.encode("utf-8"))
             else:
                 source_s3_key_object.download_fileobj(Fileobj=f_source)
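The new ``select_expr_serialization_config`` keys (``input_serialization`` and ``output_serialization``) are forwarded to ``S3Hook.select_key``. A hedged usage sketch is below; the bucket names and keys are placeholders, and the nested payloads follow the generic S3 Select ``InputSerialization``/``OutputSerialization`` structures rather than anything specific to this provider::

    from airflow.providers.amazon.aws.operators.s3 import S3FileTransformOperator

    # Illustrative only: run an S3 Select query over a gzipped CSV and store the result as JSON lines.
    select_csv_to_json = S3FileTransformOperator(
        task_id="select_csv_to_json",
        source_s3_key="s3://example-source-bucket/input/data.csv.gz",  # placeholder key
        dest_s3_key="s3://example-dest-bucket/output/data.json",       # placeholder key
        transform_script="/bin/cp",
        select_expression="SELECT s.* FROM s3object s",
        select_expr_serialization_config={
            "input_serialization": {
                "CSV": {"FileHeaderInfo": "USE"},
                "CompressionType": "GZIP",
            },
            "output_serialization": {"JSON": {}},
        },
    )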
airflow/providers/amazon/aws/operators/sagemaker.py
@@ -361,7 +361,7 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
             raise AirflowException(f"Error while running job: {event}")
 
         self.log.info(event["message"])
-        self.serialized_job = serialize(self.hook.describe_processing_job(
+        self.serialized_job = serialize(self.hook.describe_processing_job(event["job_name"]))
         self.log.info("%s completed successfully.", self.task_id)
         return {"Processing": self.serialized_job}
 
@@ -612,12 +612,11 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
 
         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
-
+
+        response = self.hook.describe_endpoint(event["job_name"])
         return {
-            "EndpointConfig": serialize(
-
-            ),
-            "Endpoint": serialize(self.hook.describe_endpoint(endpoint_info["EndpointName"])),
+            "EndpointConfig": serialize(self.hook.describe_endpoint_config(response["EndpointConfigName"])),
+            "Endpoint": serialize(self.hook.describe_endpoint(response["EndpointName"])),
         }
 
 
@@ -997,9 +996,7 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
 
         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
-        return {
-            "Tuning": serialize(self.hook.describe_tuning_job(self.config["HyperParameterTuningJobName"]))
-        }
+        return {"Tuning": serialize(self.hook.describe_tuning_job(event["job_name"]))}
 
 
 class SageMakerModelOperator(SageMakerBaseOperator):
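Each of these ``execute_complete`` fixes reads the job name from the trigger's completion event instead of from the original operator config. A minimal illustration of the event shape being consumed, with hypothetical values (only the keys ``status``, ``message``, and ``job_name`` are taken from the hunks above)::

    # Hypothetical trigger payload; keys mirror those referenced in the hunks above.
    event = {
        "status": "success",
        "message": "SageMaker job completed.",
        "job_name": "example-tuning-job",
    }

    if event["status"] != "success":
        raise RuntimeError(f"Error while running job: {event}")

    # rc2 describes the job by the name reported back in the event,
    # rather than the name originally passed in the operator config.
    job_name = event["job_name"]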
airflow/providers/amazon/aws/sensors/s3.py
@@ -18,6 +18,7 @@
 from __future__ import annotations
 
 import fnmatch
+import inspect
 import os
 import re
 from datetime import datetime, timedelta
@@ -57,13 +58,13 @@ class S3KeySensor(BaseSensorOperator):
         refers to this bucket
     :param wildcard_match: whether the bucket_key should be interpreted as a
         Unix wildcard pattern
-    :param check_fn: Function that receives the list of the S3 objects,
+    :param check_fn: Function that receives the list of the S3 objects with the context values,
         and returns a boolean:
         - ``True``: the criteria is met
        - ``False``: the criteria isn't met
        **Example**: Wait for any S3 object size more than 1 megabyte ::
 
-            def check_fn(files: List) -> bool:
+            def check_fn(files: List, **kwargs) -> bool:
                 return any(f.get('Size', 0) > 1048576 for f in files)
     :param aws_conn_id: a reference to the s3 connection
     :param verify: Whether to verify SSL certificates for S3 connection.
@@ -112,7 +113,7 @@ class S3KeySensor(BaseSensorOperator):
         self.use_regex = use_regex
         self.metadata_keys = metadata_keys if metadata_keys else ["Size"]
 
-    def _check_key(self, key):
+    def _check_key(self, key, context: Context):
         bucket_name, key = S3Hook.get_s3_bucket_key(self.bucket_name, key, "bucket_name", "bucket_key")
         self.log.info("Poking for key : s3://%s/%s", bucket_name, key)
 
@@ -167,15 +168,20 @@
             files = [metadata]
 
         if self.check_fn is not None:
+            # For backwards compatibility, check if the function takes a context argument
+            signature = inspect.signature(self.check_fn)
+            if any(param.kind == inspect.Parameter.VAR_KEYWORD for param in signature.parameters.values()):
+                return self.check_fn(files, **context)
+            # Otherwise, just pass the files
             return self.check_fn(files)
 
         return True
 
     def poke(self, context: Context):
         if isinstance(self.bucket_key, str):
-            return self._check_key(self.bucket_key)
+            return self._check_key(self.bucket_key, context=context)
         else:
-            return all(self._check_key(key) for key in self.bucket_key)
+            return all(self._check_key(key, context=context) for key in self.bucket_key)
 
     def execute(self, context: Context) -> None:
         """Airflow runs this method on the worker and defers using the trigger."""
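With this change, a ``check_fn`` that declares ``**kwargs`` also receives the task context. A hedged usage sketch; the bucket key and size threshold are placeholders, and the DAG wiring is omitted::

    from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor


    def check_fn(files: list, **kwargs) -> bool:
        # ``files`` holds the matched S3 object metadata; because the function
        # accepts **kwargs, the sensor now also passes the task context through
        # (for example kwargs["ti"] or kwargs["data_interval_end"]).
        return any(f.get("Size", 0) > 1_048_576 for f in files)


    wait_for_large_file = S3KeySensor(
        task_id="wait_for_large_file",
        bucket_key="s3://example-bucket/incoming/*.csv",  # placeholder key
        wildcard_match=True,
        check_fn=check_fn,
    )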
{apache_airflow_providers_amazon-8.26.0rc1.dist-info → apache_airflow_providers_amazon-8.26.0rc2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.26.0rc1
+Version: 8.26.0rc2
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -121,7 +121,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.26.0.rc1``
+Release: ``8.26.0.rc2``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
{apache_airflow_providers_amazon-8.26.0rc1.dist-info → apache_airflow_providers_amazon-8.26.0rc2.dist-info}/RECORD
@@ -4,7 +4,7 @@ airflow/providers/amazon/get_provider_info.py,sha256=aLXM68-PpHnOerofhxQjuVkr3eC
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=
+airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=4h0SZ63YxsMKM5xk8mm5v05PbEXUIL79a0YCbJ8QP_Y,16483
 airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=SoiiA3sVB1-G02qhQDSTst_25MjW4xbSE0vVDxwR-uw,1882
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -22,13 +22,13 @@ airflow/providers/amazon/aws/auth_manager/views/auth.py,sha256=e5InDh2jYEBClkgn9
 airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
 airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=TPSNZJ6E3zqN7mvdrMrarqwHeFYN9Efd2jD3hpN7tr0,970
-airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=
+airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=AxYTE8MFJ5jB2bCNOaGg6OHF6fnh3JKBAv09jeewa_Q,20827
 airflow/providers/amazon/aws/executors/batch/batch_executor_config.py,sha256=7yYLKB1jRoBy0AeW5chcpz7i2UfvSQob9QLvMhYUWDQ,3223
 airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNVYsQRPNlLj0zC8TC_awWk2rv3tkUuYU,2445
 airflow/providers/amazon/aws/executors/batch/utils.py,sha256=Jugs8lvvtWey_CcwMkHnRVe9G0Sn8wyVmbROVrjgk9A,5286
 airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
 airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=hxj76uoo4y9koshb5Ou2hyjvNKCtrSK5wXea3iVtPqs,3762
-airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=
+airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=bY_a9lGQicnZvS77PoF6CQ_XAq3lqsUtm0xw8aL8BuU,24577
 airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=iRP39ynsYFOisLN9NQsiLikTrBBN54bWaFQs60Snrsw,5436
 airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=RLsmPN5MpLpXQftkyoIb8i8HxAw2R3vQWK1zM_M5XDg,9477
 airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -123,8 +123,8 @@ airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswR
 airflow/providers/amazon/aws/operators/rds.py,sha256=XaZ-ablCJ9gkzxSsvRBN7egkvKMtecBCYPswC9pg_Pc,39448
 airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=VdI57morYXIF8Y25HpdnL_aekDrzgYIcgbR3-fde0nY,35664
 airflow/providers/amazon/aws/operators/redshift_data.py,sha256=wK-vTDcn0MqOuF9e-71JYIEkLKihah6oGU-p_8VT2HI,8612
-airflow/providers/amazon/aws/operators/s3.py,sha256=
-airflow/providers/amazon/aws/operators/sagemaker.py,sha256=
+airflow/providers/amazon/aws/operators/s3.py,sha256=NgKrua9fRmYymkxRdK-TQ8d1nFjrbQZG_4VMBxOG-PM,35836
+airflow/providers/amazon/aws/operators/sagemaker.py,sha256=XlvYACs2pHCG0BwflwAopPh-BVvBbMYcAdonO2ig6io,84000
 airflow/providers/amazon/aws/operators/sns.py,sha256=Rttd015UhLo4pCplGybxtLhflyu_26IFzYP7WTmQFk8,3730
 airflow/providers/amazon/aws/operators/sqs.py,sha256=0KkhhIblMggNHLxAyrv5dbWcaXvdSWQA2AOQP2CzOlo,4327
 airflow/providers/amazon/aws/operators/step_function.py,sha256=wPCJ2Uvb22b9b8JqW3scOMx2FbCv_j4BmvSy1huBaAY,9141
@@ -153,7 +153,7 @@ airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=o2OSYxWLP12
 airflow/providers/amazon/aws/sensors/quicksight.py,sha256=_jw5455fWYAttuLl63uDmzt9EYU1FjaRvXtG_S_1CUE,4625
 airflow/providers/amazon/aws/sensors/rds.py,sha256=AB2dH7fLwAaQogj0NYRrOOftfeOk_INetsyVHr1_qfM,6476
 airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=4A2Fq-_mERIdNKMM8_kss1zC3C4CItRuqCdZCRRKcGo,4533
-airflow/providers/amazon/aws/sensors/s3.py,sha256=
+airflow/providers/amazon/aws/sensors/s3.py,sha256=CEzAumhalpNegZ8UCHDRqDMUF4dv19RxuuUqGXjQKY0,18201
 airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=Pd8S0hExbaqdqOKglQAi51EggJEMxHRn_sZu-QWqsts,12984
 airflow/providers/amazon/aws/sensors/sqs.py,sha256=GFzHT5nFSyIMATqwqjhEmOWZfwdOcAe4T6yUFNUlvWk,11329
 airflow/providers/amazon/aws/sensors/step_function.py,sha256=pqAtBJd3m003qvaJwr4BrKBHhYWGrJ67yaqczjcE1_w,4089
@@ -241,7 +241,7 @@ airflow/providers/amazon/aws/waiters/opensearchserverless.json,sha256=7UkPgv_tBm
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.26.
-apache_airflow_providers_amazon-8.26.
-apache_airflow_providers_amazon-8.26.
-apache_airflow_providers_amazon-8.26.
+apache_airflow_providers_amazon-8.26.0rc2.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.26.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.26.0rc2.dist-info/METADATA,sha256=myEFYpUk0pZsUkfRohEPE5vs3qjbjXFsreZYO9KV_ZA,10307
+apache_airflow_providers_amazon-8.26.0rc2.dist-info/RECORD,,