apache-airflow-providers-amazon 9.9.0rc1__py3-none-any.whl → 9.9.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/executors/batch/batch_executor.py +51 -0
  3. airflow/providers/amazon/aws/executors/ecs/utils.py +2 -2
  4. airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py +1 -1
  5. airflow/providers/amazon/aws/fs/s3.py +2 -1
  6. airflow/providers/amazon/aws/hooks/athena_sql.py +12 -2
  7. airflow/providers/amazon/aws/hooks/base_aws.py +24 -5
  8. airflow/providers/amazon/aws/hooks/batch_client.py +2 -1
  9. airflow/providers/amazon/aws/hooks/batch_waiters.py +2 -1
  10. airflow/providers/amazon/aws/hooks/chime.py +5 -1
  11. airflow/providers/amazon/aws/hooks/ec2.py +2 -1
  12. airflow/providers/amazon/aws/hooks/eks.py +1 -2
  13. airflow/providers/amazon/aws/hooks/glue.py +82 -7
  14. airflow/providers/amazon/aws/hooks/rds.py +2 -1
  15. airflow/providers/amazon/aws/hooks/s3.py +2 -2
  16. airflow/providers/amazon/aws/hooks/sagemaker.py +2 -2
  17. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +5 -1
  18. airflow/providers/amazon/aws/links/base_aws.py +2 -10
  19. airflow/providers/amazon/aws/operators/base_aws.py +1 -1
  20. airflow/providers/amazon/aws/operators/batch.py +6 -22
  21. airflow/providers/amazon/aws/operators/ecs.py +1 -1
  22. airflow/providers/amazon/aws/operators/glue.py +22 -8
  23. airflow/providers/amazon/aws/operators/redshift_data.py +1 -1
  24. airflow/providers/amazon/aws/operators/sagemaker.py +2 -2
  25. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -1
  26. airflow/providers/amazon/aws/sensors/base_aws.py +1 -1
  27. airflow/providers/amazon/aws/sensors/glue.py +56 -12
  28. airflow/providers/amazon/aws/sensors/s3.py +2 -2
  29. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -1
  30. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +1 -1
  31. airflow/providers/amazon/aws/transfers/base.py +1 -1
  32. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +2 -2
  33. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
  34. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
  35. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +1 -1
  36. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
  37. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +1 -1
  38. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +3 -3
  39. airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
  40. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
  41. airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
  42. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
  43. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +1 -1
  44. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -1
  45. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
  46. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +1 -1
  47. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
  48. airflow/providers/amazon/aws/transfers/s3_to_sql.py +8 -4
  49. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
  50. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
  51. airflow/providers/amazon/aws/transfers/sql_to_s3.py +7 -5
  52. airflow/providers/amazon/aws/triggers/base.py +0 -1
  53. airflow/providers/amazon/aws/triggers/glue.py +37 -24
  54. airflow/providers/amazon/aws/utils/connection_wrapper.py +4 -1
  55. airflow/providers/amazon/aws/utils/suppress.py +2 -1
  56. airflow/providers/amazon/aws/utils/waiter.py +1 -1
  57. airflow/providers/amazon/aws/waiters/glue.json +55 -0
  58. airflow/providers/amazon/version_compat.py +10 -0
  59. {apache_airflow_providers_amazon-9.9.0rc1.dist-info → apache_airflow_providers_amazon-9.9.1rc1.dist-info}/METADATA +8 -9
  60. {apache_airflow_providers_amazon-9.9.0rc1.dist-info → apache_airflow_providers_amazon-9.9.1rc1.dist-info}/RECORD +62 -62
  61. {apache_airflow_providers_amazon-9.9.0rc1.dist-info → apache_airflow_providers_amazon-9.9.1rc1.dist-info}/WHEEL +0 -0
  62. {apache_airflow_providers_amazon-9.9.0rc1.dist-info → apache_airflow_providers_amazon-9.9.1rc1.dist-info}/entry_points.txt +0 -0
@@ -60,7 +60,6 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
     :param script_args: etl script arguments and AWS Glue arguments (templated)
     :param retry_limit: The maximum number of times to retry this job if it fails
     :param num_of_dpus: Number of AWS Glue DPUs to allocate to this Job.
-    :param region_name: aws region name (example: us-east-1)
     :param s3_bucket: S3 bucket where logs and local etl script will be uploaded
     :param iam_role_name: AWS IAM Role for Glue Job Execution. If set `iam_role_arn` must equal None.
     :param iam_role_arn: AWS IAM ARN for Glue Job Execution. If set `iam_role_name` must equal None.
@@ -78,7 +77,20 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
         of limiting concurrency, Glue needs 5-10 seconds to clean up resources.
         Thus if status is returned immediately it might end up in case of more than 1 concurrent run.
         It is recommended to set this parameter to 10 when you are using concurrency=1.
-        For more information see: https://repost.aws/questions/QUaKgpLBMPSGWO0iq2Fob_bw/glue-run-concurrent-jobs#ANFpCL2fRnQRqgDFuIU_rpvA
+        For more information see:
+        https://repost.aws/questions/QUaKgpLBMPSGWO0iq2Fob_bw/glue-run-concurrent-jobs#ANFpCL2fRnQRqgDFuIU_rpvA
+    :param waiter_delay: Time in seconds to wait between status checks. (default: 60)
+    :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 20)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
     aws_hook_class = GlueJobHook
@@ -122,9 +134,11 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
         verbose: bool = False,
         replace_script_file: bool = False,
         update_config: bool = False,
-        job_poll_interval: int | float = 6,
         stop_job_run_on_kill: bool = False,
         sleep_before_return: int = 0,
+        job_poll_interval: int | float = 6,
+        waiter_delay: int = 60,
+        waiter_max_attempts: int = 75,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -152,6 +166,8 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
         self._job_run_id: str | None = None
         self.sleep_before_return: int = sleep_before_return
         self.s3_script_location: str | None = None
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
 
     @property
     def _hook_parameters(self):
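In practice, the new `waiter_delay` / `waiter_max_attempts` arguments drive the deferred wait instead of `job_poll_interval`. A minimal, hypothetical usage sketch follows; the job, script, and role names are placeholders and not taken from this diff:

    # Hedged sketch of a DAG task using the new waiter knobs; names are placeholders.
    from airflow.providers.amazon.aws.operators.glue import GlueJobOperator

    submit_glue_job = GlueJobOperator(
        task_id="submit_glue_job",
        job_name="my-glue-job",                    # placeholder
        script_location="s3://my-bucket/etl.py",   # placeholder
        iam_role_name="my-glue-role",              # placeholder
        deferrable=True,          # hand the wait off to the trigger
        waiter_delay=60,          # seconds between status checks (new in this release)
        waiter_max_attempts=75,   # stop checking after this many attempts (new in this release)
    )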
@@ -231,7 +247,8 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
                     run_id=self._job_run_id,
                     verbose=self.verbose,
                     aws_conn_id=self.aws_conn_id,
-                    job_poll_interval=self.job_poll_interval,
+                    waiter_delay=self.waiter_delay,
+                    waiter_max_attempts=self.waiter_max_attempts,
                 ),
                 method_name="execute_complete",
             )
@@ -254,7 +271,7 @@ class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
 
         if validated_event["status"] != "success":
             raise AirflowException(f"Error in glue job: {validated_event}")
-        return validated_event["value"]
+        return validated_event["run_id"]
 
     def on_kill(self):
         """Cancel the running AWS Glue Job."""
@@ -282,7 +299,6 @@ class GlueDataQualityOperator(AwsBaseOperator[GlueDataQualityHook]):
     :param description: A description of the data quality ruleset.
     :param update_rule_set: To update existing ruleset, Set this flag to True. (default: False)
     :param data_quality_ruleset_kwargs: Extra arguments for RuleSet.
-
     :param aws_conn_id: The Airflow connection used for AWS credentials.
         If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
@@ -378,7 +394,6 @@ class GlueDataQualityRuleSetEvaluationRunOperator(AwsBaseOperator[GlueDataQualityHook]):
     :param deferrable: If True, the operator will wait asynchronously for the job to stop.
         This implies waiting for completion. This mode requires aiobotocore module to be installed.
         (default: False)
-
     :param aws_conn_id: The Airflow connection used for AWS credentials.
         If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
@@ -543,7 +558,6 @@ class GlueDataQualityRuleRecommendationRunOperator(AwsBaseOperator[GlueDataQualityHook]):
     :param deferrable: If True, the operator will wait asynchronously for the job to stop.
         This implies waiting for completion. This mode requires aiobotocore module to be installed.
         (default: False)
-
     :param aws_conn_id: The Airflow connection used for AWS credentials.
         If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
@@ -159,7 +159,7 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
         self.statement_id: str = query_execution_output.statement_id
 
         if query_execution_output.session_id:
-            self.xcom_push(context, key="session_id", value=query_execution_output.session_id)
+            context["ti"].xcom_push(key="session_id", value=query_execution_output.session_id)
 
         if self.deferrable and self.wait_for_completion:
             is_finished: bool = self.hook.check_query_is_finished(self.statement_id)
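Downstream consumption of the pushed `session_id` is unchanged by this switch from `self.xcom_push(context, ...)` to `context["ti"].xcom_push(...)`. A hedged TaskFlow sketch, with a placeholder upstream task id:

    # Hedged sketch: pulling the "session_id" XCom in a downstream task.
    from airflow.decorators import task

    @task
    def use_redshift_session(ti=None):
        # xcom_pull behaves the same regardless of how the value was pushed
        session_id = ti.xcom_pull(task_ids="run_redshift_query", key="session_id")  # placeholder task id
        print(f"Reusing Redshift Data API session: {session_id}")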
@@ -20,8 +20,8 @@ import datetime
 import json
 import time
 import urllib
-from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any, Callable, ClassVar
+from collections.abc import Callable, Sequence
+from typing import TYPE_CHECKING, Any, ClassVar
 
 from botocore.exceptions import ClientError
 
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.sagemaker_unified_studio import (
     SageMakerNotebookHook,
 )
@@ -34,6 +33,7 @@ from airflow.providers.amazon.aws.links.sagemaker_unified_studio import (
 from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import (
     SageMakerNotebookJobTrigger,
 )
+from airflow.providers.amazon.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
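Many of the hunks below switch `BaseOperator` / `BaseSensorOperator` imports over to `airflow.providers.amazon.version_compat`. The contents of that shim are not part of this diff, so the following is only an assumed sketch of the usual compatibility pattern; the branch condition and import paths are assumptions:

    # Assumed sketch of a version-compat shim; not the actual version_compat.py contents.
    try:
        # Airflow 3.x exposes the task SDK base classes under airflow.sdk
        from airflow.sdk import BaseOperator, BaseSensorOperator
    except ImportError:
        # Fall back to the Airflow 2.x locations
        from airflow.models import BaseOperator  # type: ignore[no-redef]
        from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]

    __all__ = ["BaseOperator", "BaseSensorOperator"]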
@@ -25,7 +25,7 @@ from airflow.providers.amazon.aws.utils.mixins import (
     AwsHookType,
     aws_template_fields,
 )
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.amazon.version_compat import BaseSensorOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 
@@ -18,7 +18,6 @@
 from __future__ import annotations
 
 from collections.abc import Sequence
-from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
@@ -28,16 +27,16 @@ from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.glue import (
     GlueDataQualityRuleRecommendationRunCompleteTrigger,
     GlueDataQualityRuleSetEvaluationRunCompleteTrigger,
+    GlueJobCompleteTrigger,
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-class GlueJobSensor(BaseSensorOperator):
+class GlueJobSensor(AwsBaseSensor[GlueJobHook]):
     """
     Waits for an AWS Glue Job to reach any of the status below.
 
@@ -50,9 +49,29 @@ class GlueJobSensor(BaseSensorOperator):
     :param job_name: The AWS Glue Job unique name
     :param run_id: The AWS Glue current running job identifier
     :param verbose: If True, more Glue Job Run logs show in the Airflow Task Logs. (default: False)
+    :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
+        module to be installed.
+        (default: False, but can be overridden in config file by setting default_deferrable to True)
+    :param poke_interval: Polling period in seconds to check for the status of the job. (default: 120)
+    :param max_retries: Number of times before returning the current state. (default: 60)
+
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = ("job_name", "run_id")
+    SUCCESS_STATES = ("SUCCEEDED",)
+    FAILURE_STATES = ("FAILED", "STOPPED", "TIMEOUT")
+
+    aws_hook_class = GlueJobHook
+    template_fields: Sequence[str] = aws_template_fields("job_name", "run_id")
 
     def __init__(
         self,
@@ -60,6 +79,9 @@ class GlueJobSensor(BaseSensorOperator):
         job_name: str,
         run_id: str,
         verbose: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        poke_interval: int = 120,
+        max_retries: int = 60,
         aws_conn_id: str | None = "aws_default",
         **kwargs,
     ):
@@ -67,24 +89,46 @@ class GlueJobSensor(BaseSensorOperator):
         self.job_name = job_name
         self.run_id = run_id
         self.verbose = verbose
+        self.deferrable = deferrable
+        self.poke_interval = poke_interval
+        self.max_retries = max_retries
         self.aws_conn_id = aws_conn_id
-        self.success_states: list[str] = ["SUCCEEDED"]
-        self.errored_states: list[str] = ["FAILED", "STOPPED", "TIMEOUT"]
         self.next_log_tokens = GlueJobHook.LogContinuationTokens()
 
-    @cached_property
-    def hook(self):
-        return GlueJobHook(aws_conn_id=self.aws_conn_id)
+    def execute(self, context: Context) -> Any:
+        if self.deferrable:
+            self.defer(
+                trigger=GlueJobCompleteTrigger(
+                    job_name=self.job_name,
+                    run_id=self.run_id,
+                    verbose=self.verbose,
+                    aws_conn_id=self.aws_conn_id,
+                    waiter_delay=int(self.poke_interval),
+                    waiter_max_attempts=self.max_retries,
+                ),
+                method_name="execute_complete",
+            )
+        else:
+            super().execute(context=context)
 
-    def poke(self, context: Context):
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        validated_event = validate_execute_complete_event(event)
+
+        if validated_event["status"] != "success":
+            message = f"Error: AWS Glue Job: {validated_event}"
+            raise AirflowException(message)
+
+        self.log.info("AWS Glue Job completed.")
+
+    def poke(self, context: Context) -> bool:
         self.log.info("Poking for job run status :for Glue Job %s and ID %s", self.job_name, self.run_id)
         job_state = self.hook.get_job_state(job_name=self.job_name, run_id=self.run_id)
 
         try:
-            if job_state in self.success_states:
+            if job_state in self.SUCCESS_STATES:
                 self.log.info("Exiting Job %s Run State: %s", self.run_id, job_state)
                 return True
-            if job_state in self.errored_states:
+            if job_state in self.FAILURE_STATES:
                 job_error_message = "Exiting Job %s Run State: %s", self.run_id, job_state
                 self.log.info(job_error_message)
                 raise AirflowException(job_error_message)
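A hedged sketch of the reworked GlueJobSensor in deferrable mode, where `poke_interval` and `max_retries` map onto the trigger's `waiter_delay` and `waiter_max_attempts`; the job name and the upstream task id are placeholders:

    # Hedged sketch; names and the upstream task id are placeholders.
    from airflow.providers.amazon.aws.sensors.glue import GlueJobSensor

    wait_for_glue_job = GlueJobSensor(
        task_id="wait_for_glue_job",
        job_name="my-glue-job",                                    # placeholder
        run_id="{{ ti.xcom_pull(task_ids='submit_glue_job') }}",   # placeholder upstream task
        deferrable=True,      # defer to GlueJobCompleteTrigger instead of poking a worker slot
        poke_interval=120,    # used as waiter_delay when deferred
        max_retries=60,       # used as waiter_max_attempts when deferred
        verbose=True,
    )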
@@ -21,9 +21,9 @@ import fnmatch
 import inspect
 import os
 import re
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, Callable, cast
+from typing import TYPE_CHECKING, Any, cast
 
 from airflow.configuration import conf
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
@@ -25,7 +25,7 @@ from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sagemaker_unified_studio import (
     SageMakerNotebookHook,
 )
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.amazon.version_compat import BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -22,8 +22,8 @@ import tempfile
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 
 try:
     from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
@@ -21,8 +21,8 @@ from __future__ import annotations
 
 from collections.abc import Sequence
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 
@@ -20,13 +20,13 @@ from __future__ import annotations
 
 import json
 import os
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from copy import copy
 from datetime import datetime
 from decimal import Decimal
 from functools import cached_property
 from tempfile import NamedTemporaryFile
-from typing import IO, TYPE_CHECKING, Any, Callable
+from typing import IO, TYPE_CHECKING, Any
 from uuid import uuid4
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -23,8 +23,8 @@ from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.exasol.hooks.exasol import ExasolHook
 
 if TYPE_CHECKING:
@@ -21,8 +21,8 @@ from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.ftp.hooks.ftp import FTPHook
 
 if TYPE_CHECKING:
@@ -26,8 +26,8 @@ from typing import TYPE_CHECKING
 from packaging.version import Version
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 
 if TYPE_CHECKING:
@@ -21,8 +21,8 @@ import tempfile
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 
 if TYPE_CHECKING:
@@ -24,9 +24,9 @@ import sys
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.models.xcom import MAX_XCOM_SIZE, XCOM_RETURN_KEY
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryApiHook
 
 if TYPE_CHECKING:
@@ -20,11 +20,11 @@
 from __future__ import annotations
 
 import json
-from collections.abc import Sequence
-from typing import TYPE_CHECKING, Callable
+from collections.abc import Callable, Sequence
+from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
 
 if TYPE_CHECKING:
@@ -22,8 +22,8 @@ from __future__ import annotations
 from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.http.hooks.http import HttpHook
 
 if TYPE_CHECKING:
@@ -22,8 +22,8 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.imap.hooks.imap import ImapHook
 
 if TYPE_CHECKING:
@@ -20,8 +20,8 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -23,8 +23,8 @@ from typing import TYPE_CHECKING, Any, cast
 
 from bson import json_util
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.mongo.hooks.mongo import MongoHook
 
 if TYPE_CHECKING:
@@ -24,11 +24,11 @@ from collections.abc import Iterable, Mapping, Sequence
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.redshift import build_credentials_block
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
@@ -23,8 +23,8 @@ from typing import TYPE_CHECKING, Any, Literal, TypedDict
 from botocore.exceptions import ClientError, WaiterError
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
+from airflow.providers.amazon.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -21,8 +21,8 @@ from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.ftp.hooks.ftp import FTPHook
 
 if TYPE_CHECKING:
@@ -20,11 +20,11 @@ from collections.abc import Iterable, Sequence
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.redshift import build_credentials_block
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
@@ -22,8 +22,8 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 from urllib.parse import urlsplit
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook
 
 if TYPE_CHECKING:
@@ -16,15 +16,19 @@
 # under the License.
 from __future__ import annotations
 
-from collections.abc import Iterable, Sequence
+from collections.abc import Callable, Iterable, Sequence
 from functools import cached_property
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
-from airflow.hooks.base import BaseHook
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
+
+try:
+    from airflow.sdk import BaseHook
+except ImportError:
+    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
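Call sites are unaffected by which branch of the try/except above supplies `BaseHook`; that is the point of the fallback. A small illustrative sketch, with a placeholder connection id:

    # Hedged sketch: identical usage on Airflow 2.x and 3.x, only the import path differs.
    try:
        from airflow.sdk import BaseHook          # Airflow 3.x
    except ImportError:
        from airflow.hooks.base import BaseHook   # Airflow 2.x fallback

    conn = BaseHook.get_connection("my_sql_conn_id")  # placeholder connection id
    print(conn.host)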
@@ -21,8 +21,8 @@ import tempfile
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.salesforce.hooks.salesforce import SalesforceHook
 
 if TYPE_CHECKING:
@@ -22,8 +22,8 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 from urllib.parse import urlsplit
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook
 
 if TYPE_CHECKING:
@@ -22,14 +22,16 @@ import gzip
 import io
 from collections import namedtuple
 from collections.abc import Iterable, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, cast
-
-from typing_extensions import Literal
+from typing import TYPE_CHECKING, Any, Literal, cast
 
 from airflow.exceptions import AirflowException
-from airflow.hooks.base import BaseHook
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.version_compat import BaseOperator
+
+try:
+    from airflow.sdk import BaseHook
+except ImportError:
+    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
 
 if TYPE_CHECKING:
     import pandas as pd
@@ -88,7 +88,6 @@ class AwsBaseWaiterTrigger(BaseTrigger):
         super().__init__()
         # parameters that should be hardcoded in the child's implem
         self.serialized_fields = serialized_fields
-
         self.waiter_name = waiter_name
         self.waiter_args = waiter_args
         self.failure_message = failure_message