apache-airflow-providers-amazon 8.26.0__py3-none-any.whl → 8.26.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +0 -10
- airflow/providers/amazon/aws/executors/batch/batch_executor.py +16 -19
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +15 -22
- airflow/providers/amazon/aws/operators/s3.py +1 -11
- airflow/providers/amazon/aws/operators/sagemaker.py +9 -6
- airflow/providers/amazon/aws/sensors/s3.py +5 -11
- {apache_airflow_providers_amazon-8.26.0.dist-info → apache_airflow_providers_amazon-8.26.0rc1.dist-info}/METADATA +5 -5
- {apache_airflow_providers_amazon-8.26.0.dist-info → apache_airflow_providers_amazon-8.26.0rc1.dist-info}/RECORD +10 -10
- {apache_airflow_providers_amazon-8.26.0.dist-info → apache_airflow_providers_amazon-8.26.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-8.26.0.dist-info → apache_airflow_providers_amazon-8.26.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -81,16 +81,6 @@ class AwsAuthManager(BaseAuthManager):
     """

     def __init__(self, appbuilder: AirflowAppBuilder) -> None:
-        from packaging.version import Version
-
-        from airflow.version import version
-
-        # TODO: remove this if block when min_airflow_version is set to higher than 2.9.0
-        if Version(version) < Version("2.9"):
-            raise AirflowOptionalProviderFeatureException(
-                "``AwsAuthManager`` is compatible with Airflow versions >= 2.9."
-            )
-
         super().__init__(appbuilder)
         self._check_avp_schema_version()

airflow/providers/amazon/aws/executors/batch/batch_executor.py
@@ -20,9 +20,8 @@
 from __future__ import annotations

 import contextlib
-import logging
 import time
-from collections import deque
+from collections import defaultdict, deque
 from copy import deepcopy
 from typing import TYPE_CHECKING, Any, Dict, List, Sequence
@@ -265,6 +264,7 @@ class AwsBatchExecutor(BaseExecutor):
         in the next iteration of the sync() method, unless it has exceeded the maximum number of
         attempts. If a job exceeds the maximum number of attempts, it is removed from the queue.
         """
+        failure_reasons = defaultdict(int)
         for _ in range(len(self.pending_jobs)):
             batch_job = self.pending_jobs.popleft()
             key = batch_job.key
@@ -272,7 +272,7 @@ class AwsBatchExecutor(BaseExecutor):
             queue = batch_job.queue
             exec_config = batch_job.executor_config
             attempt_number = batch_job.attempt_number
-            failure_reason = None
+            _failure_reason = []
             if timezone.utcnow() < batch_job.next_attempt_time:
                 self.pending_jobs.append(batch_job)
                 continue
@@ -286,18 +286,18 @@ class AwsBatchExecutor(BaseExecutor):
                 if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
                     self.pending_jobs.append(batch_job)
                     raise
-                failure_reason = str(e)
+                _failure_reason.append(str(e))
             except Exception as e:
-                failure_reason = str(e)
+                _failure_reason.append(str(e))
+
+            if _failure_reason:
+                for reason in _failure_reason:
+                    failure_reasons[reason] += 1

-            if failure_reason:
                 if attempt_number >= int(self.__class__.MAX_SUBMIT_JOB_ATTEMPTS):
-                    self.send_message_to_task_logs(
-                        logging.ERROR,
-                        "This job has been unsuccessfully attempted too many times (%s). Dropping the task. Reason: %s",
+                    self.log.error(
+                        "This job has been unsuccessfully attempted too many times (%s). Dropping the task.",
                         attempt_number,
-                        failure_reason,
-                        ti=key,
                     )
                     self.fail(key=key)
                 else:
@@ -322,6 +322,11 @@ class AwsBatchExecutor(BaseExecutor):
                     # running_state is added in Airflow 2.10 and only needed to support task adoption
                     # (an optional executor feature).
                     self.running_state(key, job_id)
+        if failure_reasons:
+            self.log.error(
+                "Pending Batch jobs failed to launch for the following reasons: %s. Retrying later.",
+                dict(failure_reasons),
+            )

     def _describe_jobs(self, job_ids) -> list[BatchJob]:
         all_jobs = []
@@ -457,11 +462,3 @@ class AwsBatchExecutor(BaseExecutor):

         not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
         return not_adopted_tis
-
-    def send_message_to_task_logs(self, level: int, msg: str, *args, ti: TaskInstance | TaskInstanceKey):
-        # TODO: remove this method when min_airflow_version is set to higher than 2.10.0
-        try:
-            super().send_message_to_task_logs(level, msg, *args, ti=ti)
-        except AttributeError:
-            # ``send_message_to_task_logs`` is added in 2.10.0
-            self.log.error(msg, *args)
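The rc1 side of the hunks above replaces per-job error logging with an aggregated counter: every failed submission records its reason in a defaultdict, and a single summary line is emitted once the pending queue has been drained. A minimal sketch of that aggregation pattern, with a stand-in submit() callable in place of the executor's real Boto3 submit_job call:

    import logging
    from collections import defaultdict

    log = logging.getLogger(__name__)


    def drain_pending(pending_jobs, submit):
        # Count each failure reason while draining the queue, then log one summary line.
        failure_reasons: dict[str, int] = defaultdict(int)
        for job in pending_jobs:
            try:
                submit(job)
            except Exception as exc:
                # Mirror the executor: remember the reason and retry the job in a later sync().
                failure_reasons[str(exc)] += 1
        if failure_reasons:
            log.error(
                "Pending Batch jobs failed to launch for the following reasons: %s. Retrying later.",
                dict(failure_reasons),
            )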
airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -23,7 +23,6 @@ Each Airflow task gets delegated out to an Amazon ECS Task.

 from __future__ import annotations

-import logging
 import time
 from collections import defaultdict, deque
 from copy import deepcopy
@@ -348,7 +347,7 @@ class AwsEcsExecutor(BaseExecutor):
             queue = ecs_task.queue
             exec_config = ecs_task.executor_config
             attempt_number = ecs_task.attempt_number
-            failure_reasons = []
+            _failure_reasons = []
             if timezone.utcnow() < ecs_task.next_attempt_time:
                 self.pending_tasks.append(ecs_task)
                 continue
@@ -362,21 +361,23 @@ class AwsEcsExecutor(BaseExecutor):
                 if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
                     self.pending_tasks.append(ecs_task)
                     raise
-                failure_reasons.append(str(e))
+                _failure_reasons.append(str(e))
             except Exception as e:
                 # Failed to even get a response back from the Boto3 API or something else went
                 # wrong. For any possible failure we want to add the exception reasons to the
                 # failure list so that it is logged to the user and most importantly the task is
                 # added back to the pending list to be retried later.
-                failure_reasons.append(str(e))
+                _failure_reasons.append(str(e))
             else:
                 # We got a response back, check if there were failures. If so, add them to the
                 # failures list so that it is logged to the user and most importantly the task
                 # is added back to the pending list to be retried later.
                 if run_task_response["failures"]:
-                    failure_reasons.extend([f["reason"] for f in run_task_response["failures"]])
+                    _failure_reasons.extend([f["reason"] for f in run_task_response["failures"]])

-            if failure_reasons:
+            if _failure_reasons:
+                for reason in _failure_reasons:
+                    failure_reasons[reason] += 1
                 # Make sure the number of attempts does not exceed MAX_RUN_TASK_ATTEMPTS
                 if int(attempt_number) < int(self.__class__.MAX_RUN_TASK_ATTEMPTS):
                     ecs_task.attempt_number += 1
@@ -385,19 +386,14 @@ class AwsEcsExecutor(BaseExecutor):
                     )
                     self.pending_tasks.append(ecs_task)
                 else:
-                    self.send_message_to_task_logs(
-                        logging.ERROR,
-                        "ECS task %s has failed a maximum of %s times. Marking as failed. Reasons: %s",
+                    self.log.error(
+                        "ECS task %s has failed a maximum of %s times. Marking as failed",
                         task_key,
                         attempt_number,
-                        ", ".join(failure_reasons),
-                        ti=task_key,
                     )
                     self.fail(task_key)
             elif not run_task_response["tasks"]:
-                self.send_message_to_task_logs(
-                    logging.ERROR, "ECS RunTask Response: %s", run_task_response, ti=task_key
-                )
+                self.log.error("ECS RunTask Response: %s", run_task_response)
                 raise EcsExecutorException(
                     "No failures and no ECS tasks provided in response. This should never happen."
                 )
@@ -411,6 +407,11 @@ class AwsEcsExecutor(BaseExecutor):
                     # executor feature).
                     # TODO: remove when min airflow version >= 2.9.2
                     pass
+        if failure_reasons:
+            self.log.error(
+                "Pending ECS tasks failed to launch for the following reasons: %s. Retrying later.",
+                dict(failure_reasons),
+            )

     def _run_task(
         self, task_id: TaskInstanceKey, cmd: CommandType, queue: str, exec_config: ExecutorConfigType
@@ -542,11 +543,3 @@ class AwsEcsExecutor(BaseExecutor):

         not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
         return not_adopted_tis
-
-    def send_message_to_task_logs(self, level: int, msg: str, *args, ti: TaskInstance | TaskInstanceKey):
-        # TODO: remove this method when min_airflow_version is set to higher than 2.10.0
-        try:
-            super().send_message_to_task_logs(level, msg, *args, ti=ti)
-        except AttributeError:
-            # ``send_message_to_task_logs`` is added in 2.10.0
-            self.log.error(msg, *args)
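The ECS executor applies the same aggregation, but failure reasons can also come from a run_task call that returns successfully yet reports per-task failures. A hedged sketch of inspecting such a response; the response dict below is a hand-written example shaped like the RunTask output handled in the hunks (a "failures" list and a "tasks" list), not output captured from a real call:

    response = {
        "tasks": [],
        "failures": [{"reason": "RESOURCE:MEMORY"}, {"reason": "AGENT"}],
    }

    reasons = [f["reason"] for f in response["failures"]]
    if reasons:
        print("Task not placed, will be retried. Reasons:", ", ".join(reasons))
    elif not response["tasks"]:
        raise RuntimeError("No failures and no ECS tasks provided in response. This should never happen.")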
airflow/providers/amazon/aws/operators/s3.py
@@ -610,7 +610,6 @@ class S3FileTransformOperator(BaseOperator):
     :param dest_s3_key: The key to be written from S3. (templated)
     :param transform_script: location of the executable transformation script
     :param select_expression: S3 Select expression
-    :param select_expr_serialization_config: A dictionary that contains input and output serialization configurations for S3 Select.
     :param script_args: arguments for transformation script (templated)
     :param source_aws_conn_id: source s3 connection
     :param source_verify: Whether or not to verify SSL certificates for S3 connection.
@@ -642,7 +641,6 @@
         dest_s3_key: str,
         transform_script: str | None = None,
         select_expression=None,
-        select_expr_serialization_config: dict[str, dict[str, dict]] | None = None,
         script_args: Sequence[str] | None = None,
         source_aws_conn_id: str | None = "aws_default",
         source_verify: bool | str | None = None,
@@ -661,7 +659,6 @@
         self.replace = replace
         self.transform_script = transform_script
         self.select_expression = select_expression
-        self.select_expr_serialization_config = select_expr_serialization_config or {}
         self.script_args = script_args or []
         self.output_encoding = sys.getdefaultencoding()

@@ -681,14 +678,7 @@
             self.log.info("Dumping S3 file %s contents to local file %s", self.source_s3_key, f_source.name)

             if self.select_expression is not None:
-                input_serialization = self.select_expr_serialization_config.get("input_serialization")
-                output_serialization = self.select_expr_serialization_config.get("output_serialization")
-                content = source_s3.select_key(
-                    key=self.source_s3_key,
-                    expression=self.select_expression,
-                    input_serialization=input_serialization,
-                    output_serialization=output_serialization,
-                )
+                content = source_s3.select_key(key=self.source_s3_key, expression=self.select_expression)
                 f_source.write(content.encode("utf-8"))
             else:
                 source_s3_key_object.download_fileobj(Fileobj=f_source)
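On the rc1 side, S3Hook.select_key is called with only the key and the S3 Select expression, while the 8.26.0 side also threads the operator's select_expr_serialization_config through as input and output serialization. A hedged sketch of the two call shapes; the connection id, bucket, keys and the CSV/JSON serialization dicts are made-up values for illustration:

    from airflow.providers.amazon.aws.hooks.s3 import S3Hook

    hook = S3Hook(aws_conn_id="aws_default")

    # Hook-default serialization, as on the rc1 side.
    content = hook.select_key(
        key="s3://example-bucket/data.csv",
        expression="SELECT * FROM S3Object s",
    )

    # Explicit serialization, which the 8.26.0 side's select_expr_serialization_config exposed.
    content = hook.select_key(
        key="s3://example-bucket/data.json",
        expression="SELECT * FROM S3Object[*] s",
        input_serialization={"JSON": {"Type": "DOCUMENT"}},
        output_serialization={"JSON": {}},
    )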
airflow/providers/amazon/aws/operators/sagemaker.py
@@ -361,7 +361,7 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
             raise AirflowException(f"Error while running job: {event}")

         self.log.info(event["message"])
-        self.serialized_job = serialize(self.hook.describe_processing_job(event["job_name"]))
+        self.serialized_job = serialize(self.hook.describe_processing_job(self.config["ProcessingJobName"]))
         self.log.info("%s completed successfully.", self.task_id)
         return {"Processing": self.serialized_job}

@@ -612,11 +612,12 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):

         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
-
-        response = self.hook.describe_endpoint(event["job_name"])
+        endpoint_info = self.config.get("Endpoint", self.config)
         return {
-            "EndpointConfig": serialize(self.hook.describe_endpoint_config(response["EndpointConfigName"])),
-            "Endpoint": serialize(response),
+            "EndpointConfig": serialize(
+                self.hook.describe_endpoint_config(endpoint_info["EndpointConfigName"])
+            ),
+            "Endpoint": serialize(self.hook.describe_endpoint(endpoint_info["EndpointName"])),
         }

@@ -996,7 +997,9 @@ class SageMakerTuningOperator(SageMakerBaseOperator):

         if event["status"] != "success":
             raise AirflowException(f"Error while running job: {event}")
-        return {"Tuning": serialize(self.hook.describe_tuning_job(event["job_name"]))}
+        return {
+            "Tuning": serialize(self.hook.describe_tuning_job(self.config["HyperParameterTuningJobName"]))
+        }


 class SageMakerModelOperator(SageMakerBaseOperator):
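On the rc1 side, execute_complete resolves job names from the operator's own config rather than from the trigger event; for the endpoint operator this means tolerating both a nested and a flat layout via config.get("Endpoint", config). A small sketch of that lookup over a made-up config dict:

    config = {
        "EndpointConfig": {"EndpointConfigName": "example-endpoint-config"},
        "Endpoint": {
            "EndpointName": "example-endpoint",
            "EndpointConfigName": "example-endpoint-config",
        },
    }

    # Fall back to the top-level dict when there is no nested "Endpoint" section.
    endpoint_info = config.get("Endpoint", config)
    print(endpoint_info["EndpointName"], endpoint_info["EndpointConfigName"])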
airflow/providers/amazon/aws/sensors/s3.py
@@ -18,7 +18,6 @@
 from __future__ import annotations

 import fnmatch
-import inspect
 import os
 import re
 from datetime import datetime, timedelta
@@ -58,13 +57,13 @@ class S3KeySensor(BaseSensorOperator):
         refers to this bucket
     :param wildcard_match: whether the bucket_key should be interpreted as a
         Unix wildcard pattern
-    :param check_fn: Function that receives the list of the S3 objects
+    :param check_fn: Function that receives the list of the S3 objects,
         and returns a boolean:
         - ``True``: the criteria is met
        - ``False``: the criteria isn't met
         **Example**: Wait for any S3 object size more than 1 megabyte ::

-            def check_fn(files: List, **kwargs) -> bool:
+            def check_fn(files: List) -> bool:
                 return any(f.get('Size', 0) > 1048576 for f in files)
     :param aws_conn_id: a reference to the s3 connection
     :param verify: Whether to verify SSL certificates for S3 connection.
@@ -113,7 +112,7 @@ class S3KeySensor(BaseSensorOperator):
         self.use_regex = use_regex
         self.metadata_keys = metadata_keys if metadata_keys else ["Size"]

-    def _check_key(self, key, context: Context):
+    def _check_key(self, key):
         bucket_name, key = S3Hook.get_s3_bucket_key(self.bucket_name, key, "bucket_name", "bucket_key")
         self.log.info("Poking for key : s3://%s/%s", bucket_name, key)

@@ -168,20 +167,15 @@ class S3KeySensor(BaseSensorOperator):
             files = [metadata]

         if self.check_fn is not None:
-            # For backwards compatibility, check if the function takes a context argument
-            signature = inspect.signature(self.check_fn)
-            if any(param.kind == inspect.Parameter.VAR_KEYWORD for param in signature.parameters.values()):
-                return self.check_fn(files, **context)
-            # Otherwise, just pass the files
             return self.check_fn(files)

         return True

     def poke(self, context: Context):
         if isinstance(self.bucket_key, str):
-            return self._check_key(self.bucket_key, context=context)
+            return self._check_key(self.bucket_key)
         else:
-            return all(self._check_key(key, context=context) for key in self.bucket_key)
+            return all(self._check_key(key) for key in self.bucket_key)

     def execute(self, context: Context) -> None:
         """Airflow runs this method on the worker and defers using the trigger."""
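With the **context pass-through gone on the rc1 side, a check_fn only ever receives the list of matched S3 object metadata dicts (just "Size" unless metadata_keys says otherwise). A hedged usage sketch wiring the one-megabyte example from the docstring into the sensor; the task id, bucket and key pattern are illustrative:

    from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor


    def check_fn(files: list) -> bool:
        # Each entry is one object's metadata; by default only "Size" is fetched.
        return any(f.get("Size", 0) > 1048576 for f in files)


    wait_for_big_file = S3KeySensor(
        task_id="wait_for_big_file",
        bucket_name="example-bucket",
        bucket_key="incoming/*.csv",
        wildcard_match=True,
        check_fn=check_fn,
        aws_conn_id="aws_default",
    )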
{apache_airflow_providers_amazon-8.26.0.dist-info → apache_airflow_providers_amazon-8.26.0rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.26.0
+Version: 8.26.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,9 +22,9 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.7.0
+Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
@@ -36,7 +36,7 @@ Requires-Dist: sqlalchemy_redshift>=0.8.6
 Requires-Dist: watchtower>=3.0.0,<4
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
@@ -121,7 +121,7 @@ Provides-Extra: ssh

 Package ``apache-airflow-providers-amazon``

-Release: ``8.26.0``
+Release: ``8.26.0.rc1``


 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
{apache_airflow_providers_amazon-8.26.0.dist-info → apache_airflow_providers_amazon-8.26.0rc1.dist-info}/RECORD
@@ -4,7 +4,7 @@ airflow/providers/amazon/get_provider_info.py,sha256=aLXM68-PpHnOerofhxQjuVkr3eC
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=
+airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=dSvuhLw5npmNMIIhZp1_yGx2OrV0UrgBfdxsSWw64TM,16100
 airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=SoiiA3sVB1-G02qhQDSTst_25MjW4xbSE0vVDxwR-uw,1882
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -22,13 +22,13 @@ airflow/providers/amazon/aws/auth_manager/views/auth.py,sha256=e5InDh2jYEBClkgn9
 airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
 airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=TPSNZJ6E3zqN7mvdrMrarqwHeFYN9Efd2jD3hpN7tr0,970
-airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=
+airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=DEn5vUmDElAJWpS8XzNIXUM4j4_KBJneZ_N-2m6oh_Q,20626
 airflow/providers/amazon/aws/executors/batch/batch_executor_config.py,sha256=7yYLKB1jRoBy0AeW5chcpz7i2UfvSQob9QLvMhYUWDQ,3223
 airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNVYsQRPNlLj0zC8TC_awWk2rv3tkUuYU,2445
 airflow/providers/amazon/aws/executors/batch/utils.py,sha256=Jugs8lvvtWey_CcwMkHnRVe9G0Sn8wyVmbROVrjgk9A,5286
 airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
 airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=hxj76uoo4y9koshb5Ou2hyjvNKCtrSK5wXea3iVtPqs,3762
-airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=
+airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=FHGxamtxqUJLMy1_Ze0YBQpLhNAWzK-K4svWnB1pfxA,24221
 airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=iRP39ynsYFOisLN9NQsiLikTrBBN54bWaFQs60Snrsw,5436
 airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=RLsmPN5MpLpXQftkyoIb8i8HxAw2R3vQWK1zM_M5XDg,9477
 airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -123,8 +123,8 @@ airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswR
 airflow/providers/amazon/aws/operators/rds.py,sha256=XaZ-ablCJ9gkzxSsvRBN7egkvKMtecBCYPswC9pg_Pc,39448
 airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=VdI57morYXIF8Y25HpdnL_aekDrzgYIcgbR3-fde0nY,35664
 airflow/providers/amazon/aws/operators/redshift_data.py,sha256=wK-vTDcn0MqOuF9e-71JYIEkLKihah6oGU-p_8VT2HI,8612
-airflow/providers/amazon/aws/operators/s3.py,sha256=
-airflow/providers/amazon/aws/operators/sagemaker.py,sha256=
+airflow/providers/amazon/aws/operators/s3.py,sha256=6RKHwGCk1cVeqmCzQ6UHBjWJsGNJhEdk4Gfxr9a9lOg,35141
+airflow/providers/amazon/aws/operators/sagemaker.py,sha256=Ycd4cLVxR-oudclAhoNMLVfAOCitAriKNu51GHhQCXM,84100
 airflow/providers/amazon/aws/operators/sns.py,sha256=Rttd015UhLo4pCplGybxtLhflyu_26IFzYP7WTmQFk8,3730
 airflow/providers/amazon/aws/operators/sqs.py,sha256=0KkhhIblMggNHLxAyrv5dbWcaXvdSWQA2AOQP2CzOlo,4327
 airflow/providers/amazon/aws/operators/step_function.py,sha256=wPCJ2Uvb22b9b8JqW3scOMx2FbCv_j4BmvSy1huBaAY,9141
@@ -153,7 +153,7 @@ airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=o2OSYxWLP12
 airflow/providers/amazon/aws/sensors/quicksight.py,sha256=_jw5455fWYAttuLl63uDmzt9EYU1FjaRvXtG_S_1CUE,4625
 airflow/providers/amazon/aws/sensors/rds.py,sha256=AB2dH7fLwAaQogj0NYRrOOftfeOk_INetsyVHr1_qfM,6476
 airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=4A2Fq-_mERIdNKMM8_kss1zC3C4CItRuqCdZCRRKcGo,4533
-airflow/providers/amazon/aws/sensors/s3.py,sha256=
+airflow/providers/amazon/aws/sensors/s3.py,sha256=RTW9-HANKFVrEoHsmH2iaoNoO_TmexfTbzB1Nwv4Vto,17745
 airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=Pd8S0hExbaqdqOKglQAi51EggJEMxHRn_sZu-QWqsts,12984
 airflow/providers/amazon/aws/sensors/sqs.py,sha256=GFzHT5nFSyIMATqwqjhEmOWZfwdOcAe4T6yUFNUlvWk,11329
 airflow/providers/amazon/aws/sensors/step_function.py,sha256=pqAtBJd3m003qvaJwr4BrKBHhYWGrJ67yaqczjcE1_w,4089
@@ -241,7 +241,7 @@ airflow/providers/amazon/aws/waiters/opensearchserverless.json,sha256=7UkPgv_tBm
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.26.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-8.26.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_amazon-8.26.0.dist-info/METADATA,sha256=
-apache_airflow_providers_amazon-8.26.0.dist-info/RECORD,,
+apache_airflow_providers_amazon-8.26.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.26.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.26.0rc1.dist-info/METADATA,sha256=S4JR4uK-23Vc4fO5_x54efSVwhN-qpaGNtJLCTh5oVg,10307
+apache_airflow_providers_amazon-8.26.0rc1.dist-info/RECORD,,