apache-airflow-providers-amazon 9.19.0rc1__py3-none-any.whl → 9.21.0__py3-none-any.whl

This diff compares the contents of two publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (75)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -2
  3. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +6 -25
  4. airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +1 -2
  5. airflow/providers/amazon/aws/auth_manager/routes/login.py +3 -4
  6. airflow/providers/amazon/aws/auth_manager/user.py +1 -1
  7. airflow/providers/amazon/aws/cli/__init__.py +16 -0
  8. airflow/providers/amazon/aws/{auth_manager/cli → cli}/definition.py +33 -0
  9. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +2 -9
  10. airflow/providers/amazon/aws/executors/aws_lambda/utils.py +1 -1
  11. airflow/providers/amazon/aws/executors/batch/batch_executor.py +28 -32
  12. airflow/providers/amazon/aws/executors/batch/batch_executor_config.py +5 -6
  13. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +4 -17
  14. airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py +5 -1
  15. airflow/providers/amazon/aws/hooks/athena_sql.py +12 -2
  16. airflow/providers/amazon/aws/hooks/base_aws.py +1 -2
  17. airflow/providers/amazon/aws/hooks/redshift_sql.py +17 -3
  18. airflow/providers/amazon/aws/hooks/s3.py +16 -13
  19. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -6
  20. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +4 -5
  21. airflow/providers/amazon/aws/hooks/ssm.py +22 -0
  22. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  23. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +1 -1
  24. airflow/providers/amazon/aws/log/s3_task_handler.py +1 -1
  25. airflow/providers/amazon/aws/operators/athena.py +1 -2
  26. airflow/providers/amazon/aws/operators/batch.py +1 -2
  27. airflow/providers/amazon/aws/operators/bedrock.py +1 -2
  28. airflow/providers/amazon/aws/operators/comprehend.py +1 -2
  29. airflow/providers/amazon/aws/operators/dms.py +1 -2
  30. airflow/providers/amazon/aws/operators/ec2.py +39 -24
  31. airflow/providers/amazon/aws/operators/ecs.py +1 -2
  32. airflow/providers/amazon/aws/operators/eks.py +1 -2
  33. airflow/providers/amazon/aws/operators/emr.py +34 -13
  34. airflow/providers/amazon/aws/operators/glue.py +1 -2
  35. airflow/providers/amazon/aws/operators/glue_crawler.py +1 -2
  36. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -2
  37. airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -2
  38. airflow/providers/amazon/aws/operators/lambda_function.py +1 -2
  39. airflow/providers/amazon/aws/operators/mwaa.py +1 -2
  40. airflow/providers/amazon/aws/operators/neptune.py +1 -2
  41. airflow/providers/amazon/aws/operators/rds.py +1 -2
  42. airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -2
  43. airflow/providers/amazon/aws/operators/redshift_data.py +1 -2
  44. airflow/providers/amazon/aws/operators/sagemaker.py +1 -2
  45. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +3 -5
  46. airflow/providers/amazon/aws/operators/ssm.py +54 -17
  47. airflow/providers/amazon/aws/operators/step_function.py +1 -2
  48. airflow/providers/amazon/aws/queues/sqs.py +1 -1
  49. airflow/providers/amazon/aws/secrets/secrets_manager.py +2 -1
  50. airflow/providers/amazon/aws/secrets/systems_manager.py +2 -1
  51. airflow/providers/amazon/aws/sensors/batch.py +1 -2
  52. airflow/providers/amazon/aws/sensors/bedrock.py +1 -2
  53. airflow/providers/amazon/aws/sensors/comprehend.py +1 -2
  54. airflow/providers/amazon/aws/sensors/ec2.py +1 -2
  55. airflow/providers/amazon/aws/sensors/emr.py +1 -2
  56. airflow/providers/amazon/aws/sensors/glue.py +1 -2
  57. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -2
  58. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -2
  59. airflow/providers/amazon/aws/sensors/mwaa.py +1 -2
  60. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -2
  61. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -2
  62. airflow/providers/amazon/aws/sensors/s3.py +1 -2
  63. airflow/providers/amazon/aws/sensors/sqs.py +1 -2
  64. airflow/providers/amazon/aws/sensors/ssm.py +22 -2
  65. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +1 -1
  66. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  67. airflow/providers/amazon/aws/triggers/emr.py +2 -2
  68. airflow/providers/amazon/aws/triggers/ssm.py +68 -11
  69. airflow/providers/amazon/get_provider_info.py +2 -1
  70. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/METADATA +17 -14
  71. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/RECORD +75 -74
  72. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/licenses/NOTICE +1 -1
  73. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/WHEEL +0 -0
  74. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/entry_points.txt +0 -0
  75. {apache_airflow_providers_amazon-9.19.0rc1.dist-info → apache_airflow_providers_amazon-9.21.0.dist-info}/licenses/LICENSE +0 -0

airflow/providers/amazon/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.19.0"
+__version__ = "9.21.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.11.0"

airflow/providers/amazon/aws/auth_manager/avp/facade.py
@@ -22,7 +22,6 @@ from functools import cached_property
 from pathlib import Path
 from typing import TYPE_CHECKING, TypedDict
 
-from airflow.configuration import conf
 from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities, get_action_id, get_entity_type
 from airflow.providers.amazon.aws.auth_manager.constants import (
     CONF_AVP_POLICY_STORE_ID_KEY,
@@ -31,7 +30,7 @@ from airflow.providers.amazon.aws.auth_manager.constants import (
     CONF_SECTION_NAME,
 )
 from airflow.providers.amazon.aws.hooks.verified_permissions import VerifiedPermissionsHook
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, conf
 from airflow.utils.helpers import prune_dict
 from airflow.utils.log.logging_mixin import LoggingMixin
 

airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import argparse
 from collections import defaultdict
 from collections.abc import Sequence
 from functools import cached_property
@@ -27,19 +26,15 @@ from fastapi import FastAPI
 
 from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
 from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager
-from airflow.cli.cli_config import CLICommand, DefaultHelpParser, GroupCommand
-from airflow.configuration import conf
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.cli.cli_config import CLICommand
 from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
 from airflow.providers.amazon.aws.auth_manager.avp.facade import (
     AwsAuthManagerAmazonVerifiedPermissionsFacade,
     IsAuthorizedRequest,
 )
-from airflow.providers.amazon.aws.auth_manager.cli.definition import (
-    AWS_AUTH_MANAGER_COMMANDS,
-)
 from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException, conf
 
 if TYPE_CHECKING:
     from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod
@@ -62,6 +57,7 @@ if TYPE_CHECKING:
         VariableDetails,
     )
     from airflow.api_fastapi.common.types import MenuItem
+    from airflow.cli.cli_config import CLICommand
 
 
 class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
@@ -468,13 +464,9 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
     @staticmethod
     def get_cli_commands() -> list[CLICommand]:
         """Vends CLI commands to be included in Airflow CLI."""
-        return [
-            GroupCommand(
-                name="aws-auth-manager",
-                help="Manage resources used by AWS auth manager",
-                subcommands=AWS_AUTH_MANAGER_COMMANDS,
-            ),
-        ]
+        from airflow.providers.amazon.aws.cli.definition import get_aws_cli_commands
+
+        return get_aws_cli_commands()
 
     def get_fastapi_app(self) -> FastAPI | None:
         from airflow.providers.amazon.aws.auth_manager.routes.login import login_router
@@ -515,14 +507,3 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             "Please update it to its latest version. "
             "See doc: https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/auth-manager/setup/amazon-verified-permissions.html#update-the-policy-store-schema."
         )
-
-
-def get_parser() -> argparse.ArgumentParser:
-    """Generate documentation; used by Sphinx argparse."""
-    from airflow.cli.cli_parser import AirflowHelpFormatter, _add_command
-
-    parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
-    subparsers = parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
-    for group_command in AwsAuthManager.get_cli_commands():
-        _add_command(subparsers, group_command)
-    return parser

airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py
@@ -22,9 +22,8 @@ from typing import TYPE_CHECKING
 
 import boto3
 
-from airflow.configuration import conf
-from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.providers.amazon.aws.auth_manager.constants import CONF_REGION_NAME_KEY, CONF_SECTION_NAME
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException, conf
 from airflow.utils import cli as cli_utils
 
 try:

airflow/providers/amazon/aws/auth_manager/routes/login.py
@@ -21,9 +21,8 @@ import logging
 from typing import Any
 
 import anyio
-from fastapi import HTTPException, Request
-from starlette import status
-from starlette.responses import RedirectResponse
+from fastapi import HTTPException, Request, status
+from fastapi.responses import RedirectResponse
 
 from airflow.api_fastapi.app import (
     AUTH_MANAGER_FASTAPI_APP_PREFIX,
@@ -31,11 +30,11 @@ from airflow.api_fastapi.app import (
 )
 from airflow.api_fastapi.auth.managers.base_auth_manager import COOKIE_NAME_JWT_TOKEN
 from airflow.api_fastapi.common.router import AirflowRouter
-from airflow.configuration import conf
 from airflow.providers.amazon.aws.auth_manager.constants import CONF_SAML_METADATA_URL_KEY, CONF_SECTION_NAME
 from airflow.providers.amazon.aws.auth_manager.datamodels.login import LoginResponse
 from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_1_PLUS
+from airflow.providers.common.compat.sdk import conf
 
 try:
     from onelogin.saml2.auth import OneLogin_Saml2_Auth
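The replacement imports above rely on FastAPI re-exporting Starlette's status codes and response classes, so the direct starlette imports can be dropped. A small hedged sketch of the redirect pattern used by the login flow (URL and status code are illustrative):

    from fastapi import status
    from fastapi.responses import RedirectResponse

    # Same objects as starlette.status / starlette.responses, imported via FastAPI.
    redirect = RedirectResponse(url="/auth/login", status_code=status.HTTP_303_SEE_OTHER)
    print(redirect.status_code)  # 303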

airflow/providers/amazon/aws/auth_manager/user.py
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
 
 try:
     from airflow.api_fastapi.auth.managers.models.base_user import BaseUser

airflow/providers/amazon/aws/cli/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.

airflow/providers/amazon/aws/cli/definition.py
@@ -17,12 +17,17 @@
 
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from airflow.cli.cli_config import (
     ActionCommand,
     Arg,
     lazy_load_command,
 )
 
+if TYPE_CHECKING:
+    import argparse
+
 ############
 # # ARGS # #
 ############
@@ -58,3 +63,31 @@ AWS_AUTH_MANAGER_COMMANDS = (
         args=(ARG_POLICY_STORE_ID, ARG_DRY_RUN),
     ),
 )
+
+
+def get_aws_cli_commands():
+    """Return CLI commands for AWS auth manager."""
+    from airflow.cli.cli_config import GroupCommand
+
+    return [
+        GroupCommand(
+            name="aws-auth-manager",
+            help="Manage resources used by AWS auth manager",
+            subcommands=AWS_AUTH_MANAGER_COMMANDS,
+        ),
+    ]
+
+
+def get_parser() -> argparse.ArgumentParser:
+    """
+    Generate documentation; used by Sphinx argparse.
+
+    :meta private:
+    """
+    from airflow.cli.cli_parser import AirflowHelpFormatter, DefaultHelpParser, _add_command
+
+    parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
+    subparsers = parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
+    for group_command in get_aws_cli_commands():
+        _add_command(subparsers, group_command)
+    return parser
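A hedged usage sketch for the relocated helpers (assumes apache-airflow and this provider are installed; output is only schematic):

    from airflow.providers.amazon.aws.cli.definition import get_aws_cli_commands, get_parser

    # The auth manager now delegates to get_aws_cli_commands(), and the Sphinx argparse
    # build can call get_parser() to render the aws-auth-manager command group.
    for group in get_aws_cli_commands():
        print(group.name)  # aws-auth-manager
    print(get_parser().format_usage())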

airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py
@@ -25,7 +25,6 @@ from typing import TYPE_CHECKING
 from botocore.exceptions import NoCredentialsError
 from botocore.exceptions import ClientError
 
-from airflow.configuration import conf
 from airflow.executors.base_executor import BaseExecutor
 from airflow.models.taskinstancekey import TaskInstanceKey
 from airflow.providers.amazon.aws.executors.aws_lambda.utils import (
@@ -41,14 +40,8 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import (
 )
 from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
-from airflow.providers.common.compat.sdk import AirflowException, Stats
-
-try:
-    from airflow.sdk import timezone
-except ImportError:
-    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
-
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.providers.common.compat.sdk import AirflowException, Stats, conf, timezone
 
 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
@@ -499,7 +492,7 @@ class AwsLambdaExecutor(BaseExecutor):
         :param heartbeat_interval: The interval in seconds to wait between checks for task completion.
         """
         self.log.info("Received signal to end, waiting for outstanding tasks to finish.")
-        time_to_wait = int(conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.END_WAIT_TIMEOUT))
+        time_to_wait = int(conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.END_WAIT_TIMEOUT, fallback="0"))
         start_time = timezone.utcnow()
         while True:
             if time_to_wait:

airflow/providers/amazon/aws/executors/aws_lambda/utils.py
@@ -59,7 +59,7 @@ class AllLambdaConfigKeys(InvokeLambdaKwargsConfigKeys):
 
     AWS_CONN_ID = "conn_id"
     CHECK_HEALTH_ON_STARTUP = "check_health_on_startup"
-    MAX_INVOKE_ATTEMPTS = "max_run_task_attempts"
+    MAX_INVOKE_ATTEMPTS = "max_invoke_attempts"
     REGION_NAME = "region_name"
     QUEUE_URL = "queue_url"
     DLQ_URL = "dead_letter_queue_url"
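With the key above renamed, deployments configure max_invoke_attempts rather than the old max_run_task_attempts. A hedged sketch of the executor-side lookup (it assumes CONFIG_GROUP_NAME is exported from the same utils module, mirroring the Batch and ECS executors, and the fallback value is illustrative):

    from airflow.providers.common.compat.sdk import conf
    from airflow.providers.amazon.aws.executors.aws_lambda.utils import (
        CONFIG_GROUP_NAME,
        AllLambdaConfigKeys,
    )

    # Reads max_invoke_attempts from the Lambda executor's config section in airflow.cfg / env vars.
    max_attempts = int(conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS, fallback="3"))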

airflow/providers/amazon/aws/executors/batch/batch_executor.py
@@ -22,13 +22,11 @@ from __future__ import annotations
 import time
 from collections import deque
 from collections.abc import Sequence
-from contextlib import suppress
 from copy import deepcopy
 from typing import TYPE_CHECKING, Any
 
 from botocore.exceptions import ClientError, NoCredentialsError
 
-from airflow.configuration import conf
 from airflow.executors.base_executor import BaseExecutor
 from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import (
     calculate_next_attempt_delay,
@@ -36,12 +34,7 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import (
 )
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
-from airflow.providers.common.compat.sdk import AirflowException, Stats
-
-try:
-    from airflow.sdk import timezone
-except ImportError:
-    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
+from airflow.providers.common.compat.sdk import AirflowException, Stats, timezone
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
@@ -94,12 +87,7 @@ class AwsBatchExecutor(BaseExecutor):
     Airflow TaskInstance's executor_config.
     """
 
-    # Maximum number of retries to submit a Batch Job.
-    MAX_SUBMIT_JOB_ATTEMPTS = conf.get(
-        CONFIG_GROUP_NAME,
-        AllBatchConfigKeys.MAX_SUBMIT_JOB_ATTEMPTS,
-        fallback=CONFIG_DEFAULTS[AllBatchConfigKeys.MAX_SUBMIT_JOB_ATTEMPTS],
-    )
+    supports_multi_team: bool = True
 
     # AWS only allows a maximum number of JOBs in the describe_jobs function
     DESCRIBE_JOBS_BATCH_SIZE = 99
@@ -113,11 +101,29 @@ class AwsBatchExecutor(BaseExecutor):
         super().__init__(*args, **kwargs)
         self.active_workers = BatchJobCollection()
         self.pending_jobs: deque = deque()
+
+        # Check if self has the ExecutorConf set on the self.conf attribute, and if not, set it to the global
+        # configuration object. This allows the changes to be backwards compatible with older versions of
+        # Airflow.
+        # Can be removed when minimum supported provider version is equal to the version of core airflow
+        # which introduces multi-team configuration.
+        if not hasattr(self, "conf"):
+            from airflow.providers.common.compat.sdk import conf
+
+            self.conf = conf
+
         self.attempts_since_last_successful_connection = 0
         self.load_batch_connection(check_connection=False)
         self.IS_BOTO_CONNECTION_HEALTHY = False
         self.submit_job_kwargs = self._load_submit_kwargs()
 
+        # Maximum number of retries to submit a Batch job.
+        self.max_submit_job_attempts = self.conf.get(
+            CONFIG_GROUP_NAME,
+            AllBatchConfigKeys.MAX_SUBMIT_JOB_ATTEMPTS,
+            fallback=CONFIG_DEFAULTS[AllBatchConfigKeys.MAX_SUBMIT_JOB_ATTEMPTS],
+        )
+
     def queue_workload(self, workload: workloads.All, session: Session | None) -> None:
         from airflow.executors import workloads
 
@@ -171,7 +177,7 @@
 
     def start(self):
         """Call this when the Executor is run for the first time by the scheduler."""
-        check_health = conf.getboolean(
+        check_health = self.conf.getboolean(
             CONFIG_GROUP_NAME, AllBatchConfigKeys.CHECK_HEALTH_ON_STARTUP, fallback=False
         )
 
@@ -187,12 +193,12 @@
 
     def load_batch_connection(self, check_connection: bool = True):
         self.log.info("Loading Connection information")
-        aws_conn_id = conf.get(
+        aws_conn_id = self.conf.get(
             CONFIG_GROUP_NAME,
             AllBatchConfigKeys.AWS_CONN_ID,
             fallback=CONFIG_DEFAULTS[AllBatchConfigKeys.AWS_CONN_ID],
         )
-        region_name = conf.get(CONFIG_GROUP_NAME, AllBatchConfigKeys.REGION_NAME, fallback=None)
+        region_name = self.conf.get(CONFIG_GROUP_NAME, AllBatchConfigKeys.REGION_NAME, fallback=None)
         self.batch = BatchClientHook(aws_conn_id=aws_conn_id, region_name=region_name).conn
         self.attempts_since_last_successful_connection += 1
         self.last_connection_reload = timezone.utcnow()
@@ -262,13 +268,13 @@
             queue = job_info.queue
             exec_info = job_info.config
             failure_count = self.active_workers.failure_count_by_id(job_id=job.job_id)
-            if int(failure_count) < int(self.__class__.MAX_SUBMIT_JOB_ATTEMPTS):
+            if int(failure_count) < int(self.max_submit_job_attempts):
                 self.log.warning(
                     "Airflow task %s failed due to %s. Failure %s out of %s occurred on %s. Rescheduling.",
                     task_key,
                     job.status_reason,
                     failure_count,
-                    self.__class__.MAX_SUBMIT_JOB_ATTEMPTS,
+                    self.max_submit_job_attempts,
                     job.job_id,
                 )
                 self.active_workers.increment_failure_count(job_id=job.job_id)
@@ -327,7 +333,7 @@
             failure_reason = str(e)
 
         if failure_reason:
-            if attempt_number >= int(self.__class__.MAX_SUBMIT_JOB_ATTEMPTS):
+            if attempt_number >= int(self.max_submit_job_attempts):
                 self.log.error(
                     (
                         "This job has been unsuccessfully attempted too many times (%s). "
@@ -466,11 +472,10 @@
             # up and kill the scheduler process.
             self.log.exception("Failed to terminate %s", self.__class__.__name__)
 
-    @staticmethod
-    def _load_submit_kwargs() -> dict:
+    def _load_submit_kwargs(self) -> dict:
         from airflow.providers.amazon.aws.executors.batch.batch_executor_config import build_submit_kwargs
 
-        submit_kwargs = build_submit_kwargs()
+        submit_kwargs = build_submit_kwargs(self.conf)
 
         if "containerOverrides" not in submit_kwargs or "command" not in submit_kwargs["containerOverrides"]:
             raise KeyError(
@@ -514,12 +519,3 @@
 
         not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
         return not_adopted_tis
-
-    def log_task_event(self, *, event: str, extra: str, ti_key: TaskInstanceKey):
-        # TODO: remove this method when min_airflow_version is set to higher than 2.10.0
-        with suppress(AttributeError):
-            super().log_task_event(
-                event=event,
-                extra=extra,
-                ti_key=ti_key,
-            )

airflow/providers/amazon/aws/executors/batch/batch_executor_config.py
@@ -33,7 +33,6 @@ import json
 from json import JSONDecodeError
 from typing import TYPE_CHECKING
 
-from airflow.configuration import conf
 from airflow.providers.amazon.aws.executors.batch.utils import (
     CONFIG_GROUP_NAME,
     AllBatchConfigKeys,
@@ -43,22 +42,22 @@ from airflow.providers.amazon.aws.executors.ecs.utils import camelize_dict_keys
 from airflow.utils.helpers import prune_dict
 
 
-def _fetch_templated_kwargs() -> dict[str, str]:
+def _fetch_templated_kwargs(conf) -> dict[str, str]:
     submit_job_kwargs_value = conf.get(
         CONFIG_GROUP_NAME, AllBatchConfigKeys.SUBMIT_JOB_KWARGS, fallback=dict()
     )
     return json.loads(str(submit_job_kwargs_value))
 
 
-def _fetch_config_values() -> dict[str, str]:
+def _fetch_config_values(conf) -> dict[str, str]:
     return prune_dict(
         {key: conf.get(CONFIG_GROUP_NAME, key, fallback=None) for key in BatchSubmitJobKwargsConfigKeys()}
     )
 
 
-def build_submit_kwargs() -> dict:
-    job_kwargs = _fetch_config_values()
-    job_kwargs.update(_fetch_templated_kwargs())
+def build_submit_kwargs(conf) -> dict:
+    job_kwargs = _fetch_config_values(conf)
+    job_kwargs.update(_fetch_templated_kwargs(conf))
 
     if "containerOverrides" not in job_kwargs:
         job_kwargs["containerOverrides"] = {}  # type: ignore

airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -26,7 +26,6 @@ from __future__ import annotations
 import time
 from collections import defaultdict, deque
 from collections.abc import Sequence
-from contextlib import suppress
 from copy import deepcopy
 from typing import TYPE_CHECKING
 
@@ -48,12 +47,7 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import (
 )
 from airflow.providers.amazon.aws.hooks.ecs import EcsHook
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
-from airflow.providers.common.compat.sdk import AirflowException, Stats
-
-try:
-    from airflow.sdk import timezone
-except ImportError:
-    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
+from airflow.providers.common.compat.sdk import AirflowException, Stats, timezone
 from airflow.utils.helpers import merge_dicts
 from airflow.utils.state import State
 
@@ -96,6 +90,8 @@ class AwsEcsExecutor(BaseExecutor):
     Airflow TaskInstance's executor_config.
     """
 
+    supports_multi_team: bool = True
+
     # AWS limits the maximum number of ARNs in the describe_tasks function.
     DESCRIBE_TASKS_BATCH_SIZE = 99
 
@@ -115,7 +111,7 @@ class AwsEcsExecutor(BaseExecutor):
         # Can be removed when minimum supported provider version is equal to the version of core airflow
         # which introduces multi-team configuration.
         if not hasattr(self, "conf"):
-            from airflow.configuration import conf
+            from airflow.providers.common.compat.sdk import conf
 
             self.conf = conf
 
@@ -611,12 +607,3 @@
 
         not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
         return not_adopted_tis
-
-    def log_task_event(self, *, event: str, extra: str, ti_key: TaskInstanceKey):
-        # TODO: remove this method when min_airflow_version is set to higher than 2.10.0
-        with suppress(AttributeError):
-            super().log_task_event(
-                event=event,
-                extra=extra,
-                ti_key=ti_key,
-            )

airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py
@@ -33,6 +33,7 @@ import json
 from json import JSONDecodeError
 
 from airflow.providers.amazon.aws.executors.ecs.utils import (
+    CONFIG_DEFAULTS,
     CONFIG_GROUP_NAME,
     ECS_LAUNCH_TYPE_EC2,
     ECS_LAUNCH_TYPE_FARGATE,
@@ -56,7 +57,10 @@ def _fetch_templated_kwargs(conf) -> dict[str, str]:
 
 def _fetch_config_values(conf) -> dict[str, str]:
     return prune_dict(
-        {key: conf.get(CONFIG_GROUP_NAME, key, fallback=None) for key in RunTaskKwargsConfigKeys()}
+        {
+            key: conf.get(CONFIG_GROUP_NAME, key, fallback=CONFIG_DEFAULTS.get(key, None))
+            for key in RunTaskKwargsConfigKeys()
+        }
     )
 
 

airflow/providers/amazon/aws/hooks/athena_sql.py
@@ -21,7 +21,11 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
 import pyathena
-from sqlalchemy.engine.url import URL
+
+try:
+    from sqlalchemy.engine.url import URL
+except ImportError:
+    URL = None  # type: ignore[assignment,misc]
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
@@ -152,9 +156,15 @@ class AthenaSQLHook(AwsBaseHook, DbApiHook):
 
     def get_uri(self) -> str:
         """Overridden to use the Athena dialect as driver name."""
        from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
+
+        if URL is None:
+            raise AirflowOptionalProviderFeatureException(
+                "sqlalchemy is required to generate the connection URI. "
+                "Install it with: pip install 'apache-airflow-providers-amazon[sqlalchemy]'"
+            )
         conn_params = self._get_conn_params()
         creds = self.get_credentials(region_name=conn_params["region_name"])
-
         return URL.create(
             f"awsathena+{conn_params['driver']}",
             username=creds.access_key,
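The guard above defers the missing-dependency error from import time to the first get_uri() call. A standalone sketch of the same pattern (the function name, message, and host format here are illustrative, not the provider's API):

    try:
        from sqlalchemy.engine.url import URL
    except ImportError:
        URL = None  # the module still imports fine without sqlalchemy

    def build_athena_uri(driver: str, access_key: str, region: str) -> str:
        if URL is None:
            raise RuntimeError(
                "sqlalchemy is required to generate the connection URI; "
                "install it with: pip install 'apache-airflow-providers-amazon[sqlalchemy]'"
            )
        # URL.create builds the SQLAlchemy connection URL without string concatenation.
        return str(URL.create(f"awsathena+{driver}", username=access_key, host=f"athena.{region}.amazonaws.com"))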

airflow/providers/amazon/aws/hooks/base_aws.py
@@ -51,12 +51,11 @@ from botocore.waiter import Waiter, WaiterModel
 from dateutil.tz import tzlocal
 from slugify import slugify
 
-from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
 from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException, BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException, BaseHook, conf
 from airflow.providers_manager import ProvidersManager
 from airflow.utils.helpers import exactly_one
 from airflow.utils.log.logging_mixin import LoggingMixin

airflow/providers/amazon/aws/hooks/redshift_sql.py
@@ -22,11 +22,15 @@ from typing import TYPE_CHECKING
 import redshift_connector
 import tenacity
 from redshift_connector import Connection as RedshiftConnection, InterfaceError, OperationalError
-from sqlalchemy import create_engine
-from sqlalchemy.engine.url import URL
+
+try:
+    from sqlalchemy import create_engine
+    from sqlalchemy.engine.url import URL
+except ImportError:
+    URL = create_engine = None  # type: ignore[assignment,misc]
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 
 if TYPE_CHECKING:
@@ -151,6 +155,11 @@ class RedshiftSQLHook(DbApiHook):
 
     def get_uri(self) -> str:
         """Overridden to use the Redshift dialect as driver name."""
+        if URL is None:
+            raise AirflowOptionalProviderFeatureException(
+                "sqlalchemy is required to generate the connection URI. "
+                "Install it with: pip install 'apache-airflow-providers-amazon[sqlalchemy]'"
+            )
         conn_params = self._get_conn_params()
 
         if "user" in conn_params:
@@ -174,6 +183,11 @@ class RedshiftSQLHook(DbApiHook):
 
     def get_sqlalchemy_engine(self, engine_kwargs=None):
         """Overridden to pass Redshift-specific arguments."""
+        if create_engine is None:
+            raise AirflowOptionalProviderFeatureException(
+                "sqlalchemy is required for creating the engine. Install it with"
+                ": pip install 'apache-airflow-providers-amazon[sqlalchemy]'"
+            )
         conn_kwargs = self.conn.extra_dejson
         if engine_kwargs is None:
             engine_kwargs = {}

airflow/providers/amazon/aws/hooks/s3.py
@@ -42,6 +42,8 @@ from typing import TYPE_CHECKING, Any
 from urllib.parse import urlsplit
 from uuid import uuid4
 
+from airflow.providers.common.compat.connection import get_async_connection
+
 if TYPE_CHECKING:
     from aiobotocore.client import AioBaseClient
     from mypy_boto3_s3.service_resource import (
@@ -52,7 +54,6 @@ if TYPE_CHECKING:
     from airflow.providers.amazon.version_compat import ArgNotSet
 
 
-from asgiref.sync import sync_to_async
 from boto3.s3.transfer import S3Transfer, TransferConfig
 from botocore.exceptions import ClientError
 
@@ -90,7 +91,7 @@ def provide_bucket_name(func: Callable) -> Callable:
        if not bound_args.arguments.get("bucket_name"):
            self = args[0]
            if self.aws_conn_id:
-                connection = await sync_to_async(self.get_connection)(self.aws_conn_id)
+                connection = await get_async_connection(self.aws_conn_id)
                if connection.schema:
                    bound_args.arguments["bucket_name"] = connection.schema
        return bound_args
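A hedged sketch of the compat helper adopted above (assumes the common.compat provider is installed and an "aws_default" connection exists); it replaces the sync_to_async(self.get_connection) call without blocking the event loop:

    import asyncio
    from airflow.providers.common.compat.connection import get_async_connection

    async def default_bucket(conn_id: str = "aws_default"):
        connection = await get_async_connection(conn_id)
        # The decorator above falls back to the connection schema as the bucket name.
        return connection.schema

    print(asyncio.run(default_bucket()))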
@@ -1743,29 +1744,31 @@ class S3Hook(AwsBaseHook):
 
     def _sync_to_local_dir_if_changed(self, s3_bucket, s3_object, local_target_path: Path):
         should_download = False
-        download_msg = ""
+        download_logs: list[str] = []
+        download_log_params: list[Any] = []
+
         if not local_target_path.exists():
             should_download = True
-            download_msg = f"Local file {local_target_path} does not exist."
+            download_logs.append("Local file %s does not exist.")
+            download_log_params.append(local_target_path)
         else:
             local_stats = local_target_path.stat()
-
             if s3_object.size != local_stats.st_size:
                 should_download = True
-                download_msg = (
-                    f"S3 object size ({s3_object.size}) and local file size ({local_stats.st_size}) differ."
-                )
+                download_logs.append("S3 object size (%s) and local file size (%s) differ.")
+                download_log_params.extend([s3_object.size, local_stats.st_size])
 
             s3_last_modified = s3_object.last_modified
-            if local_stats.st_mtime < s3_last_modified.microsecond:
+            if local_stats.st_mtime < s3_last_modified.timestamp():
                 should_download = True
-                download_msg = f"S3 object last modified ({s3_last_modified.microsecond}) and local file last modified ({local_stats.st_mtime}) differ."
+                download_logs.append("S3 object last modified (%s) and local file last modified (%s) differ.")
+                download_log_params.extend([s3_last_modified.timestamp(), local_stats.st_mtime])
 
         if should_download:
             s3_bucket.download_file(s3_object.key, local_target_path)
-            self.log.debug(
-                "%s Downloaded %s to %s", download_msg, s3_object.key, local_target_path.as_posix()
-            )
+            download_logs.append("Downloaded %s to %s")
+            download_log_params.extend([s3_object.key, local_target_path.as_posix()])
+            self.log.debug(" ".join(download_logs), *download_log_params)
         else:
             self.log.debug(
                 "Local file %s is up-to-date with S3 object %s. Skipping download.",