apache-airflow-providers-amazon 9.13.0__py3-none-any.whl → 9.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/amazon/__init__.py +1 -1
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +84 -12
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +27 -18
- airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py +10 -7
- airflow/providers/amazon/aws/hooks/glue.py +1 -1
- airflow/providers/amazon/aws/hooks/redshift_cluster.py +3 -4
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +13 -2
- airflow/providers/amazon/aws/log/s3_task_handler.py +13 -2
- airflow/providers/amazon/aws/operators/batch.py +8 -2
- airflow/providers/amazon/aws/operators/bedrock.py +33 -3
- airflow/providers/amazon/aws/operators/ecs.py +22 -3
- airflow/providers/amazon/aws/operators/redshift_cluster.py +3 -4
- airflow/providers/amazon/aws/transfers/s3_to_sftp.py +6 -1
- airflow/providers/amazon/aws/triggers/sqs.py +49 -3
- airflow/providers/amazon/aws/triggers/ssm.py +2 -2
- airflow/providers/amazon/aws/utils/task_log_fetcher.py +1 -1
- {apache_airflow_providers_amazon-9.13.0.dist-info → apache_airflow_providers_amazon-9.14.0.dist-info}/METADATA +31 -6
- {apache_airflow_providers_amazon-9.13.0.dist-info → apache_airflow_providers_amazon-9.14.0.dist-info}/RECORD +20 -20
- {apache_airflow_providers_amazon-9.13.0.dist-info → apache_airflow_providers_amazon-9.14.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_amazon-9.13.0.dist-info → apache_airflow_providers_amazon-9.14.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.13.0"
+__version__ = "9.14.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -44,7 +44,10 @@ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 if TYPE_CHECKING:
     from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod
     from airflow.api_fastapi.auth.managers.models.batch_apis import (
+        IsAuthorizedConnectionRequest,
         IsAuthorizedDagRequest,
+        IsAuthorizedPoolRequest,
+        IsAuthorizedVariableRequest,
     )
     from airflow.api_fastapi.auth.managers.models.resource_details import (
         AccessView,
@@ -244,6 +247,27 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
 
         return [menu_item for menu_item in menu_items if _has_access_to_menu_item(requests[menu_item.value])]
 
+    def batch_is_authorized_connection(
+        self,
+        requests: Sequence[IsAuthorizedConnectionRequest],
+        *,
+        user: AwsAuthManagerUser,
+    ) -> bool:
+        facade_requests: Sequence[IsAuthorizedRequest] = [
+            cast(
+                "IsAuthorizedRequest",
+                {
+                    "method": request["method"],
+                    "entity_type": AvpEntities.CONNECTION,
+                    "entity_id": cast("ConnectionDetails", request["details"]).conn_id
+                    if request.get("details")
+                    else None,
+                },
+            )
+            for request in requests
+        ]
+        return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
+
     def batch_is_authorized_dag(
         self,
         requests: Sequence[IsAuthorizedDagRequest],
@@ -251,18 +275,65 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
         user: AwsAuthManagerUser,
     ) -> bool:
         facade_requests: Sequence[IsAuthorizedRequest] = [
-            … (12 removed lines of the previous request construction; not captured in this diff view)
+            cast(
+                "IsAuthorizedRequest",
+                {
+                    "method": request["method"],
+                    "entity_type": AvpEntities.DAG,
+                    "entity_id": cast("DagDetails", request["details"]).id
+                    if request.get("details")
+                    else None,
+                    "context": {
+                        "dag_entity": {
+                            "string": cast("DagAccessEntity", request["access_entity"]).value,
+                        },
+                    }
+                    if request.get("access_entity")
+                    else None,
+                },
+            )
+            for request in requests
+        ]
+        return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
+
+    def batch_is_authorized_pool(
+        self,
+        requests: Sequence[IsAuthorizedPoolRequest],
+        *,
+        user: AwsAuthManagerUser,
+    ) -> bool:
+        facade_requests: Sequence[IsAuthorizedRequest] = [
+            cast(
+                "IsAuthorizedRequest",
+                {
+                    "method": request["method"],
+                    "entity_type": AvpEntities.POOL,
+                    "entity_id": cast("PoolDetails", request["details"]).name
+                    if request.get("details")
+                    else None,
+                },
+            )
+            for request in requests
+        ]
+        return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
+
+    def batch_is_authorized_variable(
+        self,
+        requests: Sequence[IsAuthorizedVariableRequest],
+        *,
+        user: AwsAuthManagerUser,
+    ) -> bool:
+        facade_requests: Sequence[IsAuthorizedRequest] = [
+            cast(
+                "IsAuthorizedRequest",
+                {
+                    "method": request["method"],
+                    "entity_type": AvpEntities.VARIABLE,
+                    "entity_id": cast("VariableDetails", request["details"]).key
+                    if request.get("details")
+                    else None,
+                },
+            )
             for request in requests
         ]
         return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
@@ -273,6 +344,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
         dag_ids: set[str],
         user: AwsAuthManagerUser,
         method: ResourceMethod = "GET",
+        team_name: str | None = None,
     ):
         requests: dict[str, dict[ResourceMethod, IsAuthorizedRequest]] = defaultdict(dict)
         requests_list: list[IsAuthorizedRequest] = []
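The new batch_is_authorized_connection / batch_is_authorized_pool / batch_is_authorized_variable methods all follow the same shape: each incoming request dict is flattened into an Amazon Verified Permissions facade request with a method, an entity_type and an optional entity_id taken from the request details. A minimal standalone sketch of that transformation (plain dicts and a local PoolDetails class stand in for the provider's TypedDicts; the entity-type string is illustrative):

    # Sketch of the flattening performed by the new batch_is_authorized_* methods.
    # "POOL" stands in for AvpEntities.POOL; real calls use the provider's request TypedDicts.
    def to_facade_request(request: dict, entity_type: str, id_field: str) -> dict:
        """Flatten a user-facing authorization request into an AVP facade request."""
        details = request.get("details")
        return {
            "method": request["method"],
            "entity_type": entity_type,
            "entity_id": getattr(details, id_field, None) if details else None,
        }

    class PoolDetails:  # stand-in for the resource_details class
        def __init__(self, name: str) -> None:
            self.name = name

    requests = [
        {"method": "GET", "details": PoolDetails(name="default_pool")},
        {"method": "PUT", "details": None},  # no details -> entity_id stays None
    ]
    facade_requests = [to_facade_request(r, "POOL", "name") for r in requests]
    print(facade_requests)
    # [{'method': 'GET', 'entity_type': 'POOL', 'entity_id': 'default_pool'},
    #  {'method': 'PUT', 'entity_type': 'POOL', 'entity_id': None}]

The flattened list is then handed to avp_facade.batch_is_authorized in a single call, as the diff above shows.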
airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -32,7 +32,6 @@ from typing import TYPE_CHECKING
 
 from botocore.exceptions import ClientError, NoCredentialsError
 
-from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.executors.base_executor import BaseExecutor
 from airflow.providers.amazon.aws.executors.ecs.boto_schema import BotoDescribeTasksSchema, BotoRunTaskSchema
@@ -98,13 +97,6 @@ class AwsEcsExecutor(BaseExecutor):
     Airflow TaskInstance's executor_config.
     """
 
-    # Maximum number of retries to run an ECS task.
-    MAX_RUN_TASK_ATTEMPTS = conf.get(
-        CONFIG_GROUP_NAME,
-        AllEcsConfigKeys.MAX_RUN_TASK_ATTEMPTS,
-        fallback=CONFIG_DEFAULTS[AllEcsConfigKeys.MAX_RUN_TASK_ATTEMPTS],
-    )
-
     # AWS limits the maximum number of ARNs in the describe_tasks function.
     DESCRIBE_TASKS_BATCH_SIZE = 99
 
@@ -118,8 +110,18 @@ class AwsEcsExecutor(BaseExecutor):
         self.active_workers: EcsTaskCollection = EcsTaskCollection()
         self.pending_tasks: deque = deque()
 
-        self.cluster = conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.CLUSTER)
-        self.container_name = conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.CONTAINER_NAME)
+        # Check if self has the ExecutorConf set on the self.conf attribute, and if not, set it to the global
+        # configuration object. This allows the changes to be backwards compatible with older versions of
+        # Airflow.
+        # Can be removed when minimum supported provider version is equal to the version of core airflow
+        # which introduces multi-team configuration.
+        if not hasattr(self, "conf"):
+            from airflow.configuration import conf
+
+            self.conf = conf
+
+        self.cluster = self.conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.CLUSTER)
+        self.container_name = self.conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.CONTAINER_NAME)
         self.attempts_since_last_successful_connection = 0
 
         self.load_ecs_connection(check_connection=False)
@@ -127,6 +129,13 @@ class AwsEcsExecutor(BaseExecutor):
 
         self.run_task_kwargs = self._load_run_kwargs()
 
+        # Maximum number of retries to run an ECS task.
+        self.max_run_task_attempts = self.conf.get(
+            CONFIG_GROUP_NAME,
+            AllEcsConfigKeys.MAX_RUN_TASK_ATTEMPTS,
+            fallback=CONFIG_DEFAULTS[AllEcsConfigKeys.MAX_RUN_TASK_ATTEMPTS],
+        )
+
     def queue_workload(self, workload: workloads.All, session: Session | None) -> None:
         from airflow.executors import workloads
 
@@ -154,7 +163,7 @@ class AwsEcsExecutor(BaseExecutor):
 
     def start(self):
         """Call this when the Executor is run for the first time by the scheduler."""
-        check_health = conf.getboolean(
+        check_health = self.conf.getboolean(
             CONFIG_GROUP_NAME, AllEcsConfigKeys.CHECK_HEALTH_ON_STARTUP, fallback=False
         )
 
@@ -218,12 +227,12 @@ class AwsEcsExecutor(BaseExecutor):
 
     def load_ecs_connection(self, check_connection: bool = True):
         self.log.info("Loading Connection information")
-        aws_conn_id = conf.get(
+        aws_conn_id = self.conf.get(
             CONFIG_GROUP_NAME,
             AllEcsConfigKeys.AWS_CONN_ID,
             fallback=CONFIG_DEFAULTS[AllEcsConfigKeys.AWS_CONN_ID],
         )
-        region_name = conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.REGION_NAME, fallback=None)
+        region_name = self.conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.REGION_NAME, fallback=None)
         self.ecs = EcsHook(aws_conn_id=aws_conn_id, region_name=region_name).conn
         self.attempts_since_last_successful_connection += 1
         self.last_connection_reload = timezone.utcnow()
@@ -340,13 +349,13 @@ class AwsEcsExecutor(BaseExecutor):
         queue = task_info.queue
         exec_info = task_info.config
         failure_count = self.active_workers.failure_count_by_key(task_key)
-        if int(failure_count) < int(self.MAX_RUN_TASK_ATTEMPTS):
+        if int(failure_count) < int(self.max_run_task_attempts):
             self.log.warning(
                 "Airflow task %s failed due to %s. Failure %s out of %s occurred on %s. Rescheduling.",
                 task_key,
                 reason,
                 failure_count,
-                self.MAX_RUN_TASK_ATTEMPTS,
+                self.max_run_task_attempts,
                 task_arn,
             )
             self.pending_tasks.append(
@@ -416,8 +425,8 @@ class AwsEcsExecutor(BaseExecutor):
             failure_reasons.extend([f["reason"] for f in run_task_response["failures"]])
 
         if failure_reasons:
-            # Make sure the number of attempts does not exceed MAX_RUN_TASK_ATTEMPTS
-            if int(attempt_number) < int(self.MAX_RUN_TASK_ATTEMPTS):
+            # Make sure the number of attempts does not exceed max_run_task_attempts
+            if int(attempt_number) < int(self.max_run_task_attempts):
                 ecs_task.attempt_number += 1
                 ecs_task.next_attempt_time = timezone.utcnow() + calculate_next_attempt_delay(
                     attempt_number
@@ -545,7 +554,7 @@ class AwsEcsExecutor(BaseExecutor):
     def _load_run_kwargs(self) -> dict:
         from airflow.providers.amazon.aws.executors.ecs.ecs_executor_config import build_task_kwargs
 
-        ecs_executor_run_task_kwargs = build_task_kwargs()
+        ecs_executor_run_task_kwargs = build_task_kwargs(self.conf)
 
         try:
             self.get_container(ecs_executor_run_task_kwargs["overrides"]["containerOverrides"])["command"]
airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py
@@ -32,7 +32,6 @@ from __future__ import annotations
 import json
 from json import JSONDecodeError
 
-from airflow.configuration import conf
 from airflow.providers.amazon.aws.executors.ecs.utils import (
     CONFIG_GROUP_NAME,
     ECS_LAUNCH_TYPE_EC2,
@@ -46,23 +45,27 @@ from airflow.providers.amazon.aws.hooks.ecs import EcsHook
 from airflow.utils.helpers import prune_dict
 
 
-def _fetch_templated_kwargs() -> dict[str, str]:
-    run_task_kwargs_value = conf.get(CONFIG_GROUP_NAME, AllEcsConfigKeys.RUN_TASK_KWARGS, fallback=dict())
+def _fetch_templated_kwargs(conf) -> dict[str, str]:
+    run_task_kwargs_value = conf.get(
+        CONFIG_GROUP_NAME,
+        AllEcsConfigKeys.RUN_TASK_KWARGS,
+        fallback=dict(),
+    )
     return json.loads(str(run_task_kwargs_value))
 
 
-def _fetch_config_values() -> dict[str, str]:
+def _fetch_config_values(conf) -> dict[str, str]:
     return prune_dict(
         {key: conf.get(CONFIG_GROUP_NAME, key, fallback=None) for key in RunTaskKwargsConfigKeys()}
     )
 
 
-def build_task_kwargs() -> dict:
+def build_task_kwargs(conf) -> dict:
     all_config_keys = AllEcsConfigKeys()
     # This will put some kwargs at the root of the dictionary that do NOT belong there. However,
     # the code below expects them to be there and will rearrange them as necessary.
-    task_kwargs = _fetch_config_values()
-    task_kwargs.update(_fetch_templated_kwargs())
+    task_kwargs = _fetch_config_values(conf)
+    task_kwargs.update(_fetch_templated_kwargs(conf))
 
     has_launch_type: bool = all_config_keys.LAUNCH_TYPE in task_kwargs
     has_capacity_provider: bool = all_config_keys.CAPACITY_PROVIDER_STRATEGY in task_kwargs
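Taken together, the executor changes replace the module-level global conf with an injected configuration object: the executor stores self.conf (falling back to the global object on older Airflow versions) and passes it into build_task_kwargs, _fetch_config_values and _fetch_templated_kwargs. A rough sketch of why that matters, using a minimal stand-in config object (the section and key names mirror the real ECS executor options, but the stub class itself is illustrative):

    import json

    # Anything exposing .get(section, key, fallback=...) can now be injected into the helpers.
    class StubConf:
        def __init__(self, values: dict) -> None:
            self._values = values

        def get(self, section, key, fallback=None):
            return self._values.get((section, key), fallback)

    def fetch_templated_kwargs(conf) -> dict:
        # Mirrors _fetch_templated_kwargs(conf): read run_task_kwargs from the injected config.
        raw = conf.get("aws_ecs_executor", "run_task_kwargs", fallback="{}")
        return json.loads(str(raw))

    team_a_conf = StubConf({("aws_ecs_executor", "run_task_kwargs"): '{"launchType": "FARGATE"}'})
    team_b_conf = StubConf({})  # nothing configured, falls back to an empty dict

    print(fetch_templated_kwargs(team_a_conf))  # {'launchType': 'FARGATE'}
    print(fetch_templated_kwargs(team_b_conf))  # {}

Because the helpers no longer reach for a global, two executor instances can be built against two different configuration objects in the same process, which is what the multi-team comment in the diff is preparing for.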
airflow/providers/amazon/aws/hooks/glue.py
@@ -109,7 +109,7 @@ class GlueJobHook(AwsBaseHook):
             "retry": retry_if_exception(self._should_retry_on_error),
             "wait": wait_exponential(multiplier=1, min=1, max=60),
             "stop": stop_after_attempt(5),
-            "before_sleep": before_sleep_log(self.log, log_level=20),
+            "before_sleep": before_sleep_log(self.log, log_level=20),  # type: ignore[arg-type]
             "reraise": True,
         }
 
airflow/providers/amazon/aws/hooks/redshift_cluster.py
@@ -57,10 +57,9 @@ class RedshiftHook(AwsBaseHook):
         - :external+boto3:py:meth:`Redshift.Client.create_cluster`
 
         :param cluster_identifier: A unique identifier for the cluster.
-        :param node_type: The node type to be provisioned for the cluster.
-        … (two removed docstring lines not captured in this diff view)
-            ``ra3.4xlarge``, and ``ra3.16xlarge``.
+        :param node_type: The node type to be provisioned for the cluster. Refer
+            https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html#rs-node-type-info
+            for the list of available node types.
         :param master_username: The username associated with the admin user account
             for the cluster that is being created.
         :param master_user_password: password associated with the admin user account
airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -230,8 +230,19 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
 
     trigger_should_wrap = True
 
-    def __init__(…
-        … (previous two-line constructor not captured in this diff view)
+    def __init__(
+        self,
+        base_log_folder: str,
+        log_group_arn: str,
+        max_bytes: int = 0,
+        backup_count: int = 0,
+        delay: bool = False,
+        **kwargs,
+    ) -> None:
+        # support log file size handling of FileTaskHandler
+        super().__init__(
+            base_log_folder=base_log_folder, max_bytes=max_bytes, backup_count=backup_count, delay=delay
+        )
         split_arn = log_group_arn.split(":")
 
         self.handler = None
airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -172,8 +172,19 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
     It extends airflow FileTaskHandler and uploads to and reads from S3 remote storage.
     """
 
-    def __init__(…
-        … (previous two-line constructor not captured in this diff view)
+    def __init__(
+        self,
+        base_log_folder: str,
+        s3_log_folder: str,
+        max_bytes: int = 0,
+        backup_count: int = 0,
+        delay: bool = False,
+        **kwargs,
+    ) -> None:
+        # support log file size handling of FileTaskHandler
+        super().__init__(
+            base_log_folder=base_log_folder, max_bytes=max_bytes, backup_count=backup_count, delay=delay
+        )
         self.handler: logging.FileHandler | None = None
         self.remote_base = s3_log_folder
         self.log_relative_path = ""
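Both CloudwatchTaskHandler and S3TaskHandler now accept the max_bytes, backup_count and delay arguments of FileTaskHandler and forward them to super().__init__, so the local staging file can be size-rotated before upload. A hedged sketch of how this might be wired into a custom logging configuration (the dict layout follows the usual Airflow remote-logging pattern; folder names and limits are illustrative, not values from this release):

    # Illustrative logging-config fragment: the new rotation arguments are plain handler kwargs.
    REMOTE_TASK_HANDLER = {
        "task": {
            "class": "airflow.providers.amazon.aws.log.s3_task_handler.S3TaskHandler",
            "formatter": "airflow",
            "base_log_folder": "/opt/airflow/logs",          # local staging folder (placeholder)
            "s3_log_folder": "s3://my-bucket/airflow/logs",  # remote base (placeholder bucket)
            "max_bytes": 10 * 1024 * 1024,                   # rotate the local file at roughly 10 MiB
            "backup_count": 3,                               # keep three rotated files
            "delay": True,                                   # open the file lazily
        }
    }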
airflow/providers/amazon/aws/operators/batch.py
@@ -256,8 +256,14 @@ class BatchOperator(AwsBaseOperator[BatchClientHook]):
         if validated_event["status"] != "success":
             raise AirflowException(f"Error while running job: {validated_event}")
 
-        self.…
-        … (previous two lines not captured in this diff view)
+        self.job_id = validated_event["job_id"]
+
+        # Fetch logs if awslogs_enabled
+        if self.awslogs_enabled:
+            self.monitor_job(context)  # fetch logs, no need to return
+
+        self.log.info("Job completed successfully for job_id: %s", self.job_id)
+        return self.job_id
 
     def on_kill(self):
         response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user")
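With this change, BatchOperator.execute_complete records the job_id from the trigger event and, when awslogs_enabled is set, calls monitor_job so CloudWatch logs are still fetched after a deferred run. A hedged usage sketch (queue, definition and job names are placeholders):

    from airflow.providers.amazon.aws.operators.batch import BatchOperator

    # Deferrable Batch job that still surfaces its CloudWatch logs once the trigger completes.
    submit_job = BatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",                 # placeholder
        job_queue="example-queue",              # placeholder
        job_definition="example-definition:1",  # placeholder
        deferrable=True,       # run the wait in the triggerer
        awslogs_enabled=True,  # execute_complete() now fetches the CloudWatch logs as well
    )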
airflow/providers/amazon/aws/operators/bedrock.py
@@ -642,6 +642,8 @@ class BedrockIngestDataOperator(AwsBaseOperator[BedrockAgentHook]):
         self.waiter_delay = waiter_delay
         self.waiter_max_attempts = waiter_max_attempts
         self.deferrable = deferrable
+        self.indexing_error_max_attempts = 5
+        self.indexing_error_retry_delay = 5
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
         validated_event = validate_execute_complete_event(event)
@@ -654,9 +656,37 @@ class BedrockIngestDataOperator(AwsBaseOperator[BedrockAgentHook]):
         return validated_event["ingestion_job_id"]
 
     def execute(self, context: Context) -> str:
-        … (3 removed lines of the previous start_ingestion_job call; not captured in this diff view)
+        def start_ingestion_job():
+            try:
+                ingestion_job_id = self.hook.conn.start_ingestion_job(
+                    knowledgeBaseId=self.knowledge_base_id, dataSourceId=self.data_source_id
+                )["ingestionJob"]["ingestionJobId"]
+
+                return ingestion_job_id
+            except ClientError as error:
+                error_message = error.response["Error"]["Message"].lower()
+                is_known_retryable_message = (
+                    "dependency error document status code: 404" in error_message
+                    or "request failed: [http_exception] server returned 401" in error_message
+                )
+                if all(
+                    [
+                        error.response["Error"]["Code"] == "ValidationException",
+                        is_known_retryable_message,
+                        self.indexing_error_max_attempts > 0,
+                    ]
+                ):
+                    self.indexing_error_max_attempts -= 1
+                    self.log.warning(
+                        "Index is not ready for ingestion, retrying in %s seconds.",
+                        self.indexing_error_retry_delay,
+                    )
+                    self.log.info("%s retries remaining.", self.indexing_error_max_attempts)
+                    sleep(self.indexing_error_retry_delay)
+                    return start_ingestion_job()
+                raise
+
+        ingestion_job_id = start_ingestion_job()
 
         if self.deferrable:
             self.log.info("Deferring for ingestion job.")
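BedrockIngestDataOperator.execute now wraps start_ingestion_job in a small bounded retry: a ValidationException whose message matches one of two known "index not ready" errors is retried up to indexing_error_max_attempts times with a fixed delay, and anything else is re-raised. The same pattern in isolation, as a generic runnable sketch (the exception type and the flaky callable are stand-ins, not the botocore error):

    import time

    class TransientIndexError(Exception):
        """Stand-in for the ValidationException raised while the vector index warms up."""

    def call_with_bounded_retry(func, attempts: int = 5, delay: int = 5):
        """Retry func() only for the known-transient error, at most `attempts` extra times."""
        while True:
            try:
                return func()
            except TransientIndexError:
                if attempts <= 0:
                    raise  # budget exhausted: surface the original error
                attempts -= 1
                time.sleep(delay)

    # Example: a flaky callable that succeeds on the third try.
    calls = {"n": 0}
    def flaky():
        calls["n"] += 1
        if calls["n"] < 3:
            raise TransientIndexError("index not ready")
        return "job-123"

    print(call_with_bounded_retry(flaky, attempts=5, delay=0))  # job-123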
airflow/providers/amazon/aws/operators/ecs.py
@@ -21,6 +21,7 @@ import re
 from collections.abc import Sequence
 from datetime import timedelta
 from functools import cached_property
+from time import sleep
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
@@ -629,10 +630,22 @@ class EcsRunTaskOperator(EcsBaseOperator):
         self.log.info("ECS Task started: %s", response)
 
         self.arn = response["tasks"][0]["taskArn"]
-        if not self.container_name:
-            self.container_name = response["tasks"][0]["containers"][0]["name"]
         self.log.info("ECS task ID is: %s", self._get_ecs_task_id(self.arn))
 
+        if not self.container_name and (self.awslogs_group and self.awslogs_stream_prefix):
+            backoff_schedule = [10, 30]
+            for delay in backoff_schedule:
+                sleep(delay)
+                response = self.client.describe_tasks(cluster=self.cluster, tasks=[self.arn])
+                containers = response["tasks"][0].get("containers", [])
+                if containers:
+                    self.container_name = containers[0]["name"]
+                if self.container_name:
+                    break
+
+            if not self.container_name:
+                self.log.info("Could not find container name, required for the log stream after 2 tries")
+
     def _try_reattach_task(self, started_by: str):
         if not started_by:
             raise AirflowException("`started_by` should not be empty or None")
@@ -666,7 +679,13 @@ class EcsRunTaskOperator(EcsBaseOperator):
         return self.awslogs_group and self.awslogs_stream_prefix
 
     def _get_logs_stream_name(self) -> str:
-        if (
+        if not self.container_name and self.awslogs_stream_prefix and "/" not in self.awslogs_stream_prefix:
+            self.log.warning(
+                "Container name could not be inferred and awslogs_stream_prefix '%s' does not contain '/'. "
+                "This may cause issues when extracting logs from Cloudwatch.",
+                self.awslogs_stream_prefix,
+            )
+        elif (
             self.awslogs_stream_prefix
             and self.container_name
             and not self.awslogs_stream_prefix.endswith(f"/{self.container_name}")
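EcsRunTaskOperator now only retries container-name discovery (a 10 s then 30 s back-off around describe_tasks) when awslogs_group and awslogs_stream_prefix are both configured, since the name is only needed to build the CloudWatch log-stream name. A hedged usage sketch showing the relevant parameters (cluster, task definition and log group names are placeholders):

    from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator

    run_task = EcsRunTaskOperator(
        task_id="run_ecs_task",
        cluster="example-cluster",              # placeholder
        task_definition="example-task-def",     # placeholder
        launch_type="FARGATE",
        overrides={"containerOverrides": []},
        # Log streaming: with both values set, the operator will poll describe_tasks
        # to discover the container name if it was not passed explicitly.
        awslogs_group="/ecs/example-task-def",  # placeholder
        awslogs_stream_prefix="ecs",            # prefix only; "<prefix>/<container>" also works
    )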
airflow/providers/amazon/aws/operators/redshift_cluster.py
@@ -49,10 +49,9 @@ class RedshiftCreateClusterOperator(AwsBaseOperator[RedshiftHook]):
         :ref:`howto/operator:RedshiftCreateClusterOperator`
 
     :param cluster_identifier: A unique identifier for the cluster.
-    :param node_type: The node type to be provisioned for the cluster.
-    … (two removed docstring lines not captured in this diff view)
-        ``ra3.4xlarge``, and ``ra3.16xlarge``.
+    :param node_type: The node type to be provisioned for the cluster. Refer
+        https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html#rs-node-type-info
+        for the list of available node types.
     :param master_username: The username associated with the admin user account for
         the cluster that is being created.
     :param master_user_password: The password associated with the admin user account for
airflow/providers/amazon/aws/transfers/s3_to_sftp.py
@@ -51,6 +51,9 @@ class S3ToSFTPOperator(BaseOperator):
         where the file is downloaded.
     :param s3_key: The targeted s3 key. This is the specified file path for
         downloading the file from S3.
+    :param confirm: specify if the SFTP operation should be confirmed, defaults to True.
+        When True, a stat will be performed on the remote file after upload to verify
+        the file size matches and confirm successful transfer.
     """
 
     template_fields: Sequence[str] = ("s3_key", "sftp_path", "s3_bucket")
@@ -63,6 +66,7 @@ class S3ToSFTPOperator(BaseOperator):
         sftp_path: str,
         sftp_conn_id: str = "ssh_default",
         aws_conn_id: str | None = "aws_default",
+        confirm: bool = True,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -71,6 +75,7 @@ class S3ToSFTPOperator(BaseOperator):
         self.s3_bucket = s3_bucket
         self.s3_key = s3_key
         self.aws_conn_id = aws_conn_id
+        self.confirm = confirm
 
     @staticmethod
     def get_s3_key(s3_key: str) -> str:
@@ -88,4 +93,4 @@ class S3ToSFTPOperator(BaseOperator):
 
         with NamedTemporaryFile("w") as f:
             s3_client.download_file(self.s3_bucket, self.s3_key, f.name)
-            sftp_client.put(f.name, self.sftp_path)
+            sftp_client.put(f.name, self.sftp_path, confirm=self.confirm)
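S3ToSFTPOperator now exposes paramiko's confirm flag and passes it straight through to sftp_client.put: leaving it at the default True keeps the post-upload stat() size check, while False skips it for SFTP servers that move or delete the file as soon as it lands. A hedged usage sketch (bucket, key, path and connection ids are placeholders):

    from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator

    s3_to_sftp = S3ToSFTPOperator(
        task_id="s3_to_sftp",
        s3_bucket="example-bucket",        # placeholder
        s3_key="exports/report.csv",       # placeholder
        sftp_path="/incoming/report.csv",  # placeholder
        sftp_conn_id="ssh_default",
        aws_conn_id="aws_default",
        confirm=False,  # skip the post-upload stat() when the server relocates uploads
    )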
airflow/providers/amazon/aws/triggers/sqs.py
@@ -137,6 +137,12 @@ class SqsSensorTrigger(BaseEventTrigger):
         :return: A list of messages retrieved from SQS
         """
         self.log.info("SqsSensor checking for message on queue: %s", self.sqs_queue)
+        self.log.debug(
+            "Polling SQS queue '%s' for up to %d message(s) with %d seconds wait time",
+            self.sqs_queue,
+            self.max_messages,
+            self.wait_time_seconds,
+        )
 
         receive_message_kwargs = {
             "QueueUrl": self.sqs_queue,
@@ -145,14 +151,28 @@ class SqsSensorTrigger(BaseEventTrigger):
         }
         if self.visibility_timeout is not None:
             receive_message_kwargs["VisibilityTimeout"] = self.visibility_timeout
+            self.log.debug("Using visibility timeout: %d seconds", self.visibility_timeout)
 
         response = await client.receive_message(**receive_message_kwargs)
+
+        message_count = len(response.get("Messages", []))
+        if message_count > 0:
+            self.log.debug("Received %d message(s) from SQS API call", message_count)
+        else:
+            self.log.debug("No messages returned from SQS API call")
+
         return response
 
     async def poke(self, client: Any):
         message_batch: list[Any] = []
-        … (2 removed lines not captured in this diff view)
+        self.log.debug(
+            "Starting poke operation with %d batch(es) for queue '%s'",
+            self.num_batches,
+            self.sqs_queue,
+        )
+
+        for batch_num in range(self.num_batches):
+            self.log.debug("Processing batch %d of %d", batch_num + 1, self.num_batches)
             response = await self.poll_sqs(client=client)
             messages = process_response(
                 response,
@@ -162,12 +182,14 @@ class SqsSensorTrigger(BaseEventTrigger):
             )
 
             if not messages:
+                self.log.debug("No messages found in batch %d", batch_num + 1)
                 continue
 
+            self.log.info("Found %d message(s) in batch %d", len(messages), batch_num + 1)
             message_batch.extend(messages)
 
             if self.delete_message_on_reception:
-                self.log.info("Deleting %d messages", len(messages))
+                self.log.info("Deleting %d messages from queue '%s'", len(messages), self.sqs_queue)
 
                 entries = [
                     {"Id": message["MessageId"], "ReceiptHandle": message["ReceiptHandle"]}
@@ -178,16 +200,40 @@ class SqsSensorTrigger(BaseEventTrigger):
                 if "Successful" not in response:
                     raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
 
+                self.log.debug("Successfully deleted %d messages", len(messages))
+
+        if message_batch:
+            self.log.info("Completed poke operation: collected %d total message(s)", len(message_batch))
+        else:
+            self.log.debug("Completed poke operation: no messages found across all batches")
+
         return message_batch
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
+        self.log.info(
+            "Starting SQS sensor trigger for queue '%s' with waiter_delay=%d seconds",
+            self.sqs_queue,
+            self.waiter_delay,
+        )
+
         while True:
             # This loop will run indefinitely until the timeout, which is set in the self.defer
             # method, is reached.
+            self.log.debug("Establishing connection to SQS and checking for messages")
             async with await self.hook.get_async_conn() as client:
                 result = await self.poke(client=client)
                 if result:
+                    self.log.info(
+                        "Successfully received %d message(s) from SQS queue '%s'",
+                        len(result),
+                        self.sqs_queue,
+                    )
                     yield TriggerEvent({"status": "success", "message_batch": result})
                     break
                 else:
+                    self.log.info(
+                        "No messages found in SQS queue '%s', sleeping for %d seconds before next check",
+                        self.sqs_queue,
+                        self.waiter_delay,
+                    )
                     await asyncio.sleep(self.waiter_delay)
airflow/providers/amazon/aws/triggers/ssm.py
@@ -66,8 +66,8 @@ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = self.hook()
-        async with hook.…
-            response = client.list_command_invocations(CommandId=self.command_id)
+        async with await hook.get_async_conn() as client:
+            response = await client.list_command_invocations(CommandId=self.command_id)
             instance_ids = [invocation["InstanceId"] for invocation in response.get("CommandInvocations", [])]
             waiter = hook.get_waiter(self.waiter_name, deferrable=True, client=client)
 
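The SsmRunCommandTrigger fix awaits both the async connection factory and the list_command_invocations call, which is required for aiobotocore-style clients where both are coroutines. A minimal self-contained sketch of the corrected call pattern, with a fake client standing in for the real SSM connection:

    import asyncio

    class FakeSsmClient:
        """Stand-in for an aiobotocore SSM client: API calls are coroutines."""
        async def list_command_invocations(self, CommandId: str) -> dict:
            return {"CommandInvocations": [{"InstanceId": "i-0123456789abcdef0"}]}  # illustrative

        async def __aenter__(self):
            return self

        async def __aexit__(self, *exc):
            return False

    async def get_async_conn():
        # Like the provider hook: a coroutine that returns an async context manager.
        return FakeSsmClient()

    async def main():
        # Both awaits matter: one for the connection factory, one for the API call.
        async with await get_async_conn() as client:
            response = await client.list_command_invocations(CommandId="cmd-123")
            print([i["InstanceId"] for i in response.get("CommandInvocations", [])])

    asyncio.run(main())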
airflow/providers/amazon/aws/utils/task_log_fetcher.py
@@ -28,7 +28,7 @@ from botocore.exceptions import ClientError, ConnectionClosedError
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 
 if TYPE_CHECKING:
-    from …
+    from airflow.sdk.types import Logger
 
 
 class AwsTaskLogFetcher(Thread):
apache_airflow_providers_amazon-9.14.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.13.0
+Version: 9.14.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -55,8 +55,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.13.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.13.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -104,7 +104,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.13.0``
+Release: ``9.14.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -117,7 +117,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.13.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/>`_.
 
 Installation
 ------------
@@ -184,6 +184,31 @@ Dependent package
 `apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_ ``ssh``
 ======================================================================================================================== ====================
 
+Optional dependencies
+----------------------
+
+==================== ========================================================================================================================================
+Extra                Dependencies
+==================== ========================================================================================================================================
+``aiobotocore``      ``aiobotocore[boto3]>=2.21.1``
+``cncf.kubernetes``  ``apache-airflow-providers-cncf-kubernetes>=7.2.0``
+``s3fs``             ``s3fs>=2023.10.0``
+``python3-saml``     ``python3-saml>=1.16.0; python_version < '3.13'``, ``xmlsec>=1.3.14; python_version < '3.13'``, ``lxml>=6.0.0; python_version < '3.13'``
+``apache.hive``      ``apache-airflow-providers-apache-hive``
+``exasol``           ``apache-airflow-providers-exasol``
+``fab``              ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``ftp``              ``apache-airflow-providers-ftp``
+``google``           ``apache-airflow-providers-google``
+``imap``             ``apache-airflow-providers-imap``
+``microsoft.azure``  ``apache-airflow-providers-microsoft-azure``
+``mongo``            ``apache-airflow-providers-mongo``
+``openlineage``      ``apache-airflow-providers-openlineage>=2.3.0``
+``salesforce``       ``apache-airflow-providers-salesforce``
+``ssh``              ``apache-airflow-providers-ssh``
+``standard``         ``apache-airflow-providers-standard``
+``common.messaging`` ``apache-airflow-providers-common-messaging>=2.0.0``
+==================== ========================================================================================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.13.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/changelog.html>`_.
 
apache_airflow_providers_amazon-9.14.0.dist-info/RECORD
@@ -1,5 +1,5 @@
 airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/amazon/__init__.py,sha256=…
+airflow/providers/amazon/__init__.py,sha256=x3G0awLAU_Df9ZuYvQcp6RIXS2gisytWaiMDdtQQ-O4,1496
 airflow/providers/amazon/get_provider_info.py,sha256=HqgOY-2XbaX7Nhb11ySGgUIrQJ_C8tBWRx9b6XO32zg,73282
 airflow/providers/amazon/version_compat.py,sha256=8biVK8TSccWSZKPfRoA5w9N9R6YznPWPq8RALrVDWuY,2309
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -7,7 +7,7 @@ airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70
 airflow/providers/amazon/aws/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/assets/s3.py,sha256=wNaJiOM90-SCauD4EQneZVXMO54yDRjLPfI8D5o0-fw,1861
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=…
+airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=adgOpZj2ClyoO0fqUPAaNI5V7oIictoOGef77wzhEEk,15698
 airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
 airflow/providers/amazon/aws/auth_manager/user.py,sha256=zds3U6gHmwAy1MuxFFPtGTYikMj-RjYVki9-TSdfnbg,2043
 airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -38,8 +38,8 @@ airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNV
 airflow/providers/amazon/aws/executors/batch/utils.py,sha256=QXaKyrUMCYr_Oz7Hq5b7A-gppP61fQtaOX7wip1J7ho,5274
 airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
 airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=c_2BJu6pC9xjRuPfufqSMYPZVDAbma0JO71JKSBRMSg,3760
-airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=…
-airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=…
+airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=CU2ufAHpmXtn2DTP9as1Y6XMzUTdUD6NwOcDpCql5HM,27461
+airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=1T9qeHjj79cDviWeFC-vNeibHKUvrViL1tdk2Flg50o,5887
 airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=hp_C_XzfNqKXXBOSN8e8TBG4vAqEDkaUi0YqLGJzNbE,9759
 airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/executors/utils/base_config_keys.py,sha256=q-xDVM8_iiygte8PK1khJjX7442sTNe72xJGwngtdV8,1169
@@ -69,7 +69,7 @@ airflow/providers/amazon/aws/hooks/elasticache_replication_group.py,sha256=x6kka
 airflow/providers/amazon/aws/hooks/emr.py,sha256=B8vNVLOFCkCEXnongySjcFH2fqnK3sBN8d6LbJrAYNA,22240
 airflow/providers/amazon/aws/hooks/eventbridge.py,sha256=dSaKbFB8ueOUJGl6YLIz70zXy0Xzr3yMflKS2wGFDSM,3364
 airflow/providers/amazon/aws/hooks/glacier.py,sha256=Ia4xE8D1hGnUWNs8CqNhDAsqSJiqY4HliE2-knrMHrw,3495
-airflow/providers/amazon/aws/hooks/glue.py,sha256=…
+airflow/providers/amazon/aws/hooks/glue.py,sha256=EZVoUBONTuVuX1ekt9wNVY5tldb0cw_z5-NUPzBvB9Q,26139
 airflow/providers/amazon/aws/hooks/glue_catalog.py,sha256=XQu9v_b37TXO7F_V3u7WuLS7UuCigm4UEy2tuzF8ZiY,9213
 airflow/providers/amazon/aws/hooks/glue_crawler.py,sha256=C9O2YG63BiNS6UvvB1Mn1aHWdRYzDBf2a5brimLU9IQ,7926
 airflow/providers/amazon/aws/hooks/glue_databrew.py,sha256=96duZVYtLDQgfJ02XUdov-QWPoG2Wp0O0RFuwB-6nkU,2580
@@ -82,7 +82,7 @@ airflow/providers/amazon/aws/hooks/neptune.py,sha256=a3r26msR8U5oCTMHQYqA-2OspVO
 airflow/providers/amazon/aws/hooks/opensearch_serverless.py,sha256=0zFRXXjlbQRCTt5D_q1FCp965FC8LyOhMRk2x6nvsIc,1543
 airflow/providers/amazon/aws/hooks/quicksight.py,sha256=2Am_K-BcoqcfuWwLbWjW1LsbZpGskK2bV-uHT2diu1o,7347
 airflow/providers/amazon/aws/hooks/rds.py,sha256=bAcaGeP7uNN0lp_FZtIPlt2JCZxcTEr8E7u5o8UbVYk,15238
-airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=…
+airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=0whwYfl9U3VlDBvJ60v_FTCXMO7L9J4TgN7dIu2A5MM,7952
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=JxyXEyFeJHUtMxjjtMlCMJSW9P-cnixISd3R4Ob7fy8,11841
 airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=gpI1q9KK-mkewigehTegIhWJKrAQnQu1WedDfapx6gU,10947
 airflow/providers/amazon/aws/hooks/s3.py,sha256=sAuzqwpCkWzVl45Vu6juJsb3-T6mcsskaUlPUwGZxSE,67709
@@ -110,8 +110,8 @@ airflow/providers/amazon/aws/links/sagemaker.py,sha256=RTQubIIpmjTWEGrJiRI2MyF4C
 airflow/providers/amazon/aws/links/sagemaker_unified_studio.py,sha256=pHbO14OmkqqjrjnZpt2tO3LISdBbitd9E00DV3ucfTI,1202
 airflow/providers/amazon/aws/links/step_function.py,sha256=xSL4vfKLnCn-QboRtruajpH5elRrNfw0XkY7eSfPpE4,2099
 airflow/providers/amazon/aws/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=…
-airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=…
+airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=9WSFn4HPT0752F92VQYynyp4tgIsAyYU3syzghB-4u0,11469
+airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=9sHuzRldjpyT7kGwDnS5IjlOb5qV9n9i8yur2EotsWI,9942
 airflow/providers/amazon/aws/notifications/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/notifications/chime.py,sha256=OpFM5fNknzD8mVz_04vOwKd-Ow2ArWM3QdrUAgaSHqs,2101
 airflow/providers/amazon/aws/notifications/sns.py,sha256=XracHC3r3BxzUuv-DzFLy6l7K6R_Ps85oJIUS0-Lkt4,3116
@@ -120,14 +120,14 @@ airflow/providers/amazon/aws/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39k
 airflow/providers/amazon/aws/operators/appflow.py,sha256=TlQSJc1frCw7yKfobjCLf2faWQIT0nKRans5Wy-kJQQ,20824
 airflow/providers/amazon/aws/operators/athena.py,sha256=CPKPZXN4dGELg0sW0LJIkG0X2-v5qPgWt4TMqYtTS18,14754
 airflow/providers/amazon/aws/operators/base_aws.py,sha256=Y8hbT2uDA2CUfaEhHqm9qc_P81OS1ZnPh_34sHn0FrA,3898
-airflow/providers/amazon/aws/operators/batch.py,sha256=…
-airflow/providers/amazon/aws/operators/bedrock.py,sha256=…
+airflow/providers/amazon/aws/operators/batch.py,sha256=Bkl2rEQAYVGKl7Kxz_czDYEQQhcqE1D0agiTDpQdM0w,22155
+airflow/providers/amazon/aws/operators/bedrock.py,sha256=7Y7eTgCyE6dxAN45K6pjvRizTK1T7P2mFB2eam-_n9I,47547
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=bDzAHcs2QjrnwE3Z9w4s6JFeu5Xl74JWKR8Y5Ez03C4,5005
 airflow/providers/amazon/aws/operators/comprehend.py,sha256=Vkdw0i2iW9_WRQLSDKNncNkVIQaNWG8jz-DxHy47Fmg,17607
 airflow/providers/amazon/aws/operators/datasync.py,sha256=7DZtLjYxQqk7kV968CFHtWk5pbQPausbvE8DAkvPhqw,20265
 airflow/providers/amazon/aws/operators/dms.py,sha256=XmIcXpkp_--PBQF1m7NFfeHDTp4aRV4lnXUu5h6kEa4,34658
 airflow/providers/amazon/aws/operators/ec2.py,sha256=SclBzOLo3GbQe3kw4S3MKf8zLm8IaKNSiGTc_U-OxRo,19700
-airflow/providers/amazon/aws/operators/ecs.py,sha256=…
+airflow/providers/amazon/aws/operators/ecs.py,sha256=5jvAibuqWS-x7-S9saRSb5umgkw8ec2YO_JGoMsja6o,33766
 airflow/providers/amazon/aws/operators/eks.py,sha256=djaoaMhj3N5JzKeQVZSMlJb9XGkiGYUAQx-xVocoBC0,52086
 airflow/providers/amazon/aws/operators/emr.py,sha256=Cw1qiA0eiPJODCSxHhPayo2_0TZOlA4mj8pcveV0WNc,75983
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=NacTdvRzZZFizSzC3rb0Z7g8dHQWkKQEXGYzFKOp3fc,10421
@@ -141,7 +141,7 @@ airflow/providers/amazon/aws/operators/mwaa.py,sha256=lJuQ3kZ1Gh_udmeSE4j-HNnXcE
 airflow/providers/amazon/aws/operators/neptune.py,sha256=vLuPro1yIoPY_8YlAD-PTKnzb-JQLY4E7653QYfMFLg,14774
 airflow/providers/amazon/aws/operators/quicksight.py,sha256=LNZRW8N4yIXLUgVb0vmJekjG1NFS70yGyeKtinNATMk,4116
 airflow/providers/amazon/aws/operators/rds.py,sha256=tiRxWVtx2trpeCEzgD7h7_xzsequg2jUA71okYCfSYQ,43848
-airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=…
+airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=0JDOk2VJ_jnp68ZqSl0OVF4SGO5eXoFGmIuY69o5cwE,39096
 airflow/providers/amazon/aws/operators/redshift_data.py,sha256=motUwcXjxNoboswXx9ooE8fNbNnJ1y9OQyzekYGPNss,10854
 airflow/providers/amazon/aws/operators/s3.py,sha256=Imd3siCtmtaPWRmmSd382dJHhr49WRd-_aP6Tx5T7ac,38389
 airflow/providers/amazon/aws/operators/sagemaker.py,sha256=Aj4mgTCXtwWMP1Tms-3bmY10UjBWdWI7RG-LSkbpuwQ,91251
@@ -203,7 +203,7 @@ airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=jz3BeSf071NuJD7O
 airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=hjSEVKiUUp1Pl7tAzAHquTFXF-HRAlR1UeuDenFATxQ,11706
 airflow/providers/amazon/aws/transfers/s3_to_ftp.py,sha256=V9a12bCtqXIpscOrO95dHw7umRRy_jJmB8zA25qVEM8,3018
 airflow/providers/amazon/aws/transfers/s3_to_redshift.py,sha256=ZwzHcM48bjmvdU5AUVvy0iI-VRnY4Y-acyPSQ3vGMWI,12019
-airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=…
+airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=3yC1yMQiq7SceHi661GLyz_8NEiOsv_GzUCBS8lwqAo,3861
 airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=RLuAwBHJOMcGaZcDgrivAhLRsOuZsjwTxJEOcLB_1MY,4971
 airflow/providers/amazon/aws/transfers/salesforce_to_s3.py,sha256=noALwo6dpjEHF33ZDDZY0z47HK0Gsv-BU3Zr2NE3zRA,5738
 airflow/providers/amazon/aws/transfers/sftp_to_s3.py,sha256=o5IDLFmeHzqBH6_Uh_fGTk9iymjQYsuGznnH-qZ1M-Y,4234
@@ -234,8 +234,8 @@ airflow/providers/amazon/aws/triggers/redshift_data.py,sha256=10IIWbta6Zpd2VOe1p
 airflow/providers/amazon/aws/triggers/s3.py,sha256=s2_JGXWy7ge6mgypVsdgO80q03p0tddw5ndVhsX0rBI,10357
 airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=Dq7LpnaqZkugrySfljz2n6kcMdL0qhwf9qUhPxOOOzk,7946
 airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py,sha256=1WGrng4rUprRDKSmbyeH-Eda2-8wf7o6VufT1_L7peI,2503
-airflow/providers/amazon/aws/triggers/sqs.py,sha256=…
-airflow/providers/amazon/aws/triggers/ssm.py,sha256=…
+airflow/providers/amazon/aws/triggers/sqs.py,sha256=7rr4lBTIc9ifPvd_cirSzyf6LboafcqfDKqCMPBge2Y,10619
+airflow/providers/amazon/aws/triggers/ssm.py,sha256=9FsI6tuw_cWZuXvZGkOucm8XJpx9mzlHZPjTQhy6s44,3492
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
 airflow/providers/amazon/aws/utils/__init__.py,sha256=-Q5XK8ZV7EK6unj_4hlciqztACPuftMjNKMuBA21q84,3178
 airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=KJsYG3qnESxxh2PFWvf83gHKzqEEAE9jBANTMoyRn3A,16435
@@ -251,7 +251,7 @@ airflow/providers/amazon/aws/utils/sagemaker_unified_studio.py,sha256=6ZiMtMzRx4
 airflow/providers/amazon/aws/utils/sqs.py,sha256=HNVp0XgsN_L46NMbxlgN3dWvGAWk1Uv_Sl2lT915T_4,3511
 airflow/providers/amazon/aws/utils/suppress.py,sha256=SxAZeDpRsaHpa6NBxDywDEIebazfTawYZJtQZPVE3Hw,2353
 airflow/providers/amazon/aws/utils/tags.py,sha256=LDmVOEdPlyWW47bQByQ20UiA4baA34eT65pBd5goOQA,1746
-airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=…
+airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=Z6-zIcxdkYS0kPSrxE2ETdQslrNLL8t4hC55nlsqLjw,5364
 airflow/providers/amazon/aws/utils/waiter.py,sha256=Vn26dk_UglyhHZeLAQIDJiZmKhIxs--btSa1GRukaKk,4134
 airflow/providers/amazon/aws/utils/waiter_with_logging.py,sha256=dbvF7SLDNu4K_xTE2oDNfT2Ar-Rocrw5e1c39nwcpf0,6782
 airflow/providers/amazon/aws/waiters/README.md,sha256=ftfKyOH1Rqxa77DyLHkqRF1IltQty3uczLXWX7ekE0A,4535
@@ -280,7 +280,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
-apache_airflow_providers_amazon-9.13.0.dist-info/…
-apache_airflow_providers_amazon-9.13.0.dist-info/…
-apache_airflow_providers_amazon-9.13.0.dist-info/…
-apache_airflow_providers_amazon-9.13.0.dist-info/…
+apache_airflow_providers_amazon-9.14.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-9.14.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_amazon-9.14.0.dist-info/METADATA,sha256=arKaVQs6Lxzrokw-9nPCZ_QIHJX7TN0BXbbV77-2_XQ,11838
+apache_airflow_providers_amazon-9.14.0.dist-info/RECORD,,
apache_airflow_providers_amazon-9.14.0.dist-info/WHEEL, entry_points.txt: file contents unchanged; only the dist-info directory name changes with the new version.