apache-airflow-providers-amazon 8.29.0__py3-none-any.whl → 9.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/{datasets → assets}/s3.py +10 -6
  3. airflow/providers/amazon/aws/auth_manager/avp/entities.py +1 -1
  4. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +5 -11
  5. airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py +2 -5
  6. airflow/providers/amazon/aws/auth_manager/cli/definition.py +0 -6
  7. airflow/providers/amazon/aws/auth_manager/views/auth.py +1 -1
  8. airflow/providers/amazon/aws/hooks/athena.py +3 -17
  9. airflow/providers/amazon/aws/hooks/base_aws.py +4 -162
  10. airflow/providers/amazon/aws/hooks/logs.py +1 -20
  11. airflow/providers/amazon/aws/hooks/quicksight.py +1 -17
  12. airflow/providers/amazon/aws/hooks/redshift_cluster.py +6 -120
  13. airflow/providers/amazon/aws/hooks/redshift_data.py +52 -14
  14. airflow/providers/amazon/aws/hooks/s3.py +24 -27
  15. airflow/providers/amazon/aws/hooks/sagemaker.py +4 -48
  16. airflow/providers/amazon/aws/log/s3_task_handler.py +1 -6
  17. airflow/providers/amazon/aws/operators/appflow.py +1 -10
  18. airflow/providers/amazon/aws/operators/batch.py +1 -29
  19. airflow/providers/amazon/aws/operators/datasync.py +1 -8
  20. airflow/providers/amazon/aws/operators/ecs.py +1 -25
  21. airflow/providers/amazon/aws/operators/eks.py +7 -46
  22. airflow/providers/amazon/aws/operators/emr.py +16 -232
  23. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -10
  24. airflow/providers/amazon/aws/operators/rds.py +3 -17
  25. airflow/providers/amazon/aws/operators/redshift_data.py +18 -3
  26. airflow/providers/amazon/aws/operators/s3.py +12 -2
  27. airflow/providers/amazon/aws/operators/sagemaker.py +10 -32
  28. airflow/providers/amazon/aws/secrets/secrets_manager.py +1 -40
  29. airflow/providers/amazon/aws/sensors/batch.py +1 -8
  30. airflow/providers/amazon/aws/sensors/dms.py +1 -8
  31. airflow/providers/amazon/aws/sensors/dynamodb.py +22 -8
  32. airflow/providers/amazon/aws/sensors/emr.py +0 -7
  33. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -8
  34. airflow/providers/amazon/aws/sensors/glue_crawler.py +1 -8
  35. airflow/providers/amazon/aws/sensors/quicksight.py +1 -29
  36. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -8
  37. airflow/providers/amazon/aws/sensors/s3.py +1 -8
  38. airflow/providers/amazon/aws/sensors/sagemaker.py +2 -9
  39. airflow/providers/amazon/aws/sensors/sqs.py +1 -8
  40. airflow/providers/amazon/aws/sensors/step_function.py +1 -8
  41. airflow/providers/amazon/aws/transfers/base.py +1 -14
  42. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +5 -33
  43. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +15 -10
  44. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +6 -6
  45. airflow/providers/amazon/aws/transfers/sql_to_s3.py +3 -6
  46. airflow/providers/amazon/aws/triggers/batch.py +1 -168
  47. airflow/providers/amazon/aws/triggers/eks.py +1 -20
  48. airflow/providers/amazon/aws/triggers/emr.py +0 -32
  49. airflow/providers/amazon/aws/triggers/glue_crawler.py +0 -11
  50. airflow/providers/amazon/aws/triggers/glue_databrew.py +0 -21
  51. airflow/providers/amazon/aws/triggers/rds.py +0 -79
  52. airflow/providers/amazon/aws/triggers/redshift_cluster.py +5 -64
  53. airflow/providers/amazon/aws/triggers/sagemaker.py +2 -93
  54. airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py +106 -0
  55. airflow/providers/amazon/aws/utils/connection_wrapper.py +4 -164
  56. airflow/providers/amazon/aws/utils/mixins.py +1 -23
  57. airflow/providers/amazon/aws/utils/openlineage.py +3 -1
  58. airflow/providers/amazon/aws/utils/task_log_fetcher.py +1 -1
  59. airflow/providers/amazon/get_provider_info.py +13 -4
  60. {apache_airflow_providers_amazon-8.29.0.dist-info → apache_airflow_providers_amazon-9.0.0.dist-info}/METADATA +8 -9
  61. {apache_airflow_providers_amazon-8.29.0.dist-info → apache_airflow_providers_amazon-9.0.0.dist-info}/RECORD +64 -64
  62. airflow/providers/amazon/aws/auth_manager/cli/idc_commands.py +0 -149
  63. /airflow/providers/amazon/aws/{datasets → assets}/__init__.py +0 -0
  64. {apache_airflow_providers_amazon-8.29.0.dist-info → apache_airflow_providers_amazon-9.0.0.dist-info}/WHEEL +0 -0
  65. {apache_airflow_providers_amazon-8.29.0.dist-info → apache_airflow_providers_amazon-9.0.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/operators/redshift_data.py

@@ -56,13 +56,16 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
     :param workgroup_name: name of the Redshift Serverless workgroup. Mutually exclusive with
         `cluster_identifier`. Specify this parameter to query Redshift Serverless. More info
         https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-serverless.html
+    :param session_id: the session identifier of the query
+    :param session_keep_alive_seconds: duration in seconds to keep the session alive after the query
+        finishes. The maximum time a session can keep alive is 24 hours
     :param aws_conn_id: The Airflow connection used for AWS credentials.
         If this is ``None`` or empty then the default boto3 behaviour is used. If
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
     :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
-    :param verify: Whether or not to verify SSL certificates. See:
+    :param verify: Whether to verify SSL certificates. See:
         https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
     :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
         https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
@@ -77,6 +80,7 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
         "parameters",
         "statement_name",
         "workgroup_name",
+        "session_id",
     )
     template_ext = (".sql",)
     template_fields_renderers = {"sql": "sql"}
@@ -84,8 +88,8 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
 
     def __init__(
         self,
-        database: str,
         sql: str | list,
+        database: str | None = None,
         cluster_identifier: str | None = None,
         db_user: str | None = None,
         parameters: list | None = None,
@@ -97,6 +101,8 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
         return_sql_result: bool = False,
         workgroup_name: str | None = None,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        session_id: str | None = None,
+        session_keep_alive_seconds: int | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -120,6 +126,8 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
         self.return_sql_result = return_sql_result
         self.statement_id: str | None = None
         self.deferrable = deferrable
+        self.session_id = session_id
+        self.session_keep_alive_seconds = session_keep_alive_seconds
 
     def execute(self, context: Context) -> GetStatementResultResponseTypeDef | str:
         """Execute a statement against Amazon Redshift."""
@@ -130,7 +138,7 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
         if self.deferrable:
             wait_for_completion = False
 
-        self.statement_id = self.hook.execute_query(
+        query_execution_output = self.hook.execute_query(
             database=self.database,
             sql=self.sql,
             cluster_identifier=self.cluster_identifier,
@@ -142,8 +150,15 @@ class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
             with_event=self.with_event,
             wait_for_completion=wait_for_completion,
             poll_interval=self.poll_interval,
+            session_id=self.session_id,
+            session_keep_alive_seconds=self.session_keep_alive_seconds,
         )
 
+        self.statement_id = query_execution_output.statement_id
+
+        if query_execution_output.session_id:
+            self.xcom_push(context, key="session_id", value=query_execution_output.session_id)
+
         if self.deferrable and self.wait_for_completion:
            is_finished = self.hook.check_query_is_finished(self.statement_id)
            if not is_finished:
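
Note: the session parameters above let several tasks share one Redshift session. A minimal sketch of the new usage, assuming a DAG context; the task ids, workgroup name, and SQL are illustrative, while the "session_id" XCom key is the one pushed by execute() above:

    from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator

    # Open a session and keep it alive for 10 minutes after the query finishes.
    create_tmp = RedshiftDataOperator(
        task_id="create_tmp",
        workgroup_name="my-serverless-workgroup",  # illustrative name
        database="dev",
        sql="CREATE TEMPORARY TABLE tmp_stage AS SELECT 1 AS id;",
        session_keep_alive_seconds=600,
    )

    # Reuse the session (and its temporary table); session_id is a template field.
    query_tmp = RedshiftDataOperator(
        task_id="query_tmp",
        sql="SELECT * FROM tmp_stage;",
        session_id="{{ ti.xcom_pull(task_ids='create_tmp', key='session_id') }}",
    )

    create_tmp >> query_tmp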
airflow/providers/amazon/aws/operators/s3.py

@@ -24,6 +24,9 @@ import sys
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Sequence
 
+import pytz
+from dateutil import parser
+
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -498,8 +501,8 @@ class S3DeleteObjectsOperator(BaseOperator):
         bucket: str,
         keys: str | list | None = None,
         prefix: str | None = None,
-        from_datetime: datetime | None = None,
-        to_datetime: datetime | None = None,
+        from_datetime: datetime | str | None = None,
+        to_datetime: datetime | str | None = None,
         aws_conn_id: str | None = "aws_default",
         verify: str | bool | None = None,
         **kwargs,
@@ -530,6 +533,13 @@ class S3DeleteObjectsOperator(BaseOperator):
 
         if isinstance(self.keys, (list, str)) and not self.keys:
             return
+        # handle case where dates are strings, specifically when sent as template fields and macros.
+        if isinstance(self.to_datetime, str):
+            self.to_datetime = parser.parse(self.to_datetime).replace(tzinfo=pytz.UTC)
+
+        if isinstance(self.from_datetime, str):
+            self.from_datetime = parser.parse(self.from_datetime).replace(tzinfo=pytz.UTC)
+
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
 
         keys = self.keys or s3_hook.list_keys(
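
Note: since from_datetime/to_datetime now also accept strings, rendered Jinja values can be passed directly; the rendered strings are parsed by dateutil and coerced to UTC as in execute() above. A minimal sketch (bucket, prefix, and the macro expressions are placeholders):

    from airflow.providers.amazon.aws.operators.s3 import S3DeleteObjectsOperator

    cleanup = S3DeleteObjectsOperator(
        task_id="cleanup_old_objects",
        bucket="my-bucket",      # placeholder bucket
        prefix="staging/",       # placeholder prefix
        from_datetime="{{ macros.ds_add(ds, -7) }}",
        to_datetime="{{ ds }}",
    )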
airflow/providers/amazon/aws/operators/sagemaker.py

@@ -19,14 +19,13 @@ from __future__ import annotations
 import datetime
 import json
 import time
-import warnings
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable, Sequence
 
 from botocore.exceptions import ClientError
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.sagemaker import (
@@ -239,7 +238,7 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
         doesn't finish within max_ingestion_time seconds. If you set this parameter to None,
         the operation does not timeout.
     :param action_if_job_exists: Behaviour if the job name already exists. Possible options are "timestamp"
-        (default), "increment" (deprecated) and "fail".
+        (default) and "fail".
     :param deferrable: Run operator in the deferrable mode. This is only effective if wait_for_completion is
         set to True.
     :return Dict: Returns The ARN of the processing job created in Amazon SageMaker.
@@ -260,18 +259,11 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
         **kwargs,
     ):
         super().__init__(config=config, aws_conn_id=aws_conn_id, **kwargs)
-        if action_if_job_exists not in ("increment", "fail", "timestamp"):
+        if action_if_job_exists not in ("fail", "timestamp"):
             raise AirflowException(
-                f"Argument action_if_job_exists accepts only 'timestamp', 'increment' and 'fail'. \
+                f"Argument action_if_job_exists accepts only 'timestamp' and 'fail'. \
                 Provided value: '{action_if_job_exists}'."
             )
-        if action_if_job_exists == "increment":
-            warnings.warn(
-                "Action 'increment' on job name conflict has been deprecated for performance reasons."
-                "The alternative to 'fail' is now 'timestamp'.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
         self.action_if_job_exists = action_if_job_exists
         self.wait_for_completion = wait_for_completion
         self.print_log = print_log
@@ -657,7 +649,7 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
     :param check_if_job_exists: If set to true, then the operator will check whether a transform job
         already exists for the name in the config.
     :param action_if_job_exists: Behaviour if the job name already exists. Possible options are "timestamp"
-        (default), "increment" (deprecated) and "fail".
+        (default) and "fail".
         This is only relevant if check_if_job_exists is True.
     :return Dict: Returns The ARN of the model created in Amazon SageMaker.
     """
@@ -684,18 +676,11 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
         self.max_attempts = max_attempts or 60
         self.max_ingestion_time = max_ingestion_time
         self.check_if_job_exists = check_if_job_exists
-        if action_if_job_exists in ("increment", "fail", "timestamp"):
-            if action_if_job_exists == "increment":
-                warnings.warn(
-                    "Action 'increment' on job name conflict has been deprecated for performance reasons."
-                    "The alternative to 'fail' is now 'timestamp'.",
-                    AirflowProviderDeprecationWarning,
-                    stacklevel=2,
-                )
+        if action_if_job_exists in ("fail", "timestamp"):
             self.action_if_job_exists = action_if_job_exists
         else:
             raise AirflowException(
-                f"Argument action_if_job_exists accepts only 'timestamp', 'increment' and 'fail'. \
+                f"Argument action_if_job_exists accepts only 'timestamp' and 'fail'. \
                 Provided value: '{action_if_job_exists}'."
             )
         self.check_if_model_exists = check_if_model_exists
@@ -1064,7 +1049,7 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
     :param check_if_job_exists: If set to true, then the operator will check whether a training job
         already exists for the name in the config.
     :param action_if_job_exists: Behaviour if the job name already exists. Possible options are "timestamp"
-        (default), "increment" (deprecated) and "fail".
+        (default) and "fail".
         This is only relevant if check_if_job_exists is True.
     :param deferrable: Run operator in the deferrable mode. This is only effective if wait_for_completion is
         set to True.
@@ -1093,18 +1078,11 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
         self.max_attempts = max_attempts or 60
         self.max_ingestion_time = max_ingestion_time
         self.check_if_job_exists = check_if_job_exists
-        if action_if_job_exists in {"timestamp", "increment", "fail"}:
-            if action_if_job_exists == "increment":
-                warnings.warn(
-                    "Action 'increment' on job name conflict has been deprecated for performance reasons."
-                    "The alternative to 'fail' is now 'timestamp'.",
-                    AirflowProviderDeprecationWarning,
-                    stacklevel=2,
-                )
+        if action_if_job_exists in {"timestamp", "fail"}:
             self.action_if_job_exists = action_if_job_exists
         else:
             raise AirflowException(
-                f"Argument action_if_job_exists accepts only 'timestamp', 'increment' and 'fail'. \
+                f"Argument action_if_job_exists accepts only 'timestamp' and 'fail'. \
                 Provided value: '{action_if_job_exists}'."
             )
         self.deferrable = deferrable
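
Note: DAGs that still pass action_if_job_exists="increment" now fail at construction time. A minimal migration sketch (the config dict is a placeholder, not a complete SageMaker job definition):

    from airflow.providers.amazon.aws.operators.sagemaker import SageMakerProcessingOperator

    process = SageMakerProcessingOperator(
        task_id="process_data",
        config={"ProcessingJobName": "my-processing-job"},  # placeholder config
        # "increment" was removed in 9.0.0; use "timestamp" (the default) or "fail".
        action_if_job_exists="timestamp",
    )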
airflow/providers/amazon/aws/secrets/secrets_manager.py

@@ -21,12 +21,9 @@ from __future__ import annotations
 
 import json
 import re
-import warnings
 from functools import cached_property
 from typing import Any
-from urllib.parse import unquote
 
-from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.utils import trim_none_values
 from airflow.secrets import BaseSecretsBackend
 from airflow.utils.log.logging_mixin import LoggingMixin
@@ -145,28 +142,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
         self.variables_lookup_pattern = variables_lookup_pattern
         self.config_lookup_pattern = config_lookup_pattern
         self.sep = sep
-
-        if kwargs.pop("full_url_mode", None) is not None:
-            warnings.warn(
-                "The `full_url_mode` kwarg is deprecated. Going forward, the `SecretsManagerBackend`"
-                " will support both URL-encoded and JSON-encoded secrets at the same time. The encoding"
-                " of the secret will be determined automatically.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
-
-        if kwargs.get("are_secret_values_urlencoded") is not None:
-            warnings.warn(
-                "The `secret_values_are_urlencoded` is deprecated. This kwarg only exists to assist in"
-                " migrating away from URL-encoding secret values for JSON secrets."
-                " To remove this warning, make sure your JSON secrets are *NOT* URL-encoded, and then"
-                " remove this kwarg from backend_kwargs.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
-            self.are_secret_values_urlencoded = kwargs.pop("are_secret_values_urlencoded", None)
-        else:
-            self.are_secret_values_urlencoded = False
+        self.are_secret_values_urlencoded = False
 
         self.extra_conn_words = extra_conn_words or {}
 
@@ -222,19 +198,6 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return conn_d
 
-    def _remove_escaping_in_secret_dict(self, secret: dict[str, Any]) -> dict[str, Any]:
-        """Un-escape secret values that are URL-encoded."""
-        for k, v in secret.copy().items():
-            if k == "extra" and isinstance(v, dict):
-                # The old behavior was that extras were _not_ urlencoded inside the secret.
-                # So we should just allow the extra dict to remain as-is.
-                continue
-
-            elif v is not None:
-                secret[k] = unquote(v)
-
-        return secret
-
     def get_conn_value(self, conn_id: str) -> str | None:
         """
         Get serialized representation of Connection.
@@ -259,8 +222,6 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
             secret_dict = json.loads(secret)
             standardized_secret_dict = self._standardize_secret_keys(secret_dict)
-            if self.are_secret_values_urlencoded:
-                standardized_secret_dict = self._remove_escaping_in_secret_dict(standardized_secret_dict)
             standardized_secret = json.dumps(standardized_secret_dict)
             return standardized_secret
         else:
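
Note: deployments that still pass the removed kwargs should drop them from backend_kwargs, and JSON secrets must no longer be URL-encoded. An illustrative configuration via environment variables (the prefix values are placeholders; only supported kwargs remain):

    import os

    os.environ["AIRFLOW__SECRETS__BACKEND"] = (
        "airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend"
    )
    # No full_url_mode or are_secret_values_urlencoded keys anymore.
    os.environ["AIRFLOW__SECRETS__BACKEND_KWARGS"] = (
        '{"connections_prefix": "airflow/connections", "sep": "/"}'
    )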
airflow/providers/amazon/aws/sensors/batch.py

@@ -20,10 +20,8 @@ from datetime import timedelta
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated import deprecated
-
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
 from airflow.sensors.base import BaseSensorOperator
@@ -120,11 +118,6 @@ class BatchSensor(BaseSensorOperator):
         job_id = event["job_id"]
         self.log.info("Batch Job %s complete", job_id)
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> BatchClientHook:
-        """Create and return a BatchClientHook."""
-        return self.hook
-
     @cached_property
     def hook(self) -> BatchClientHook:
         return BatchClientHook(
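
Note: the same get_hook() removal recurs in the sensors below; custom code should switch to the cached hook property. A minimal sketch (the job id is a placeholder):

    from airflow.providers.amazon.aws.sensors.batch import BatchSensor

    sensor = BatchSensor(task_id="wait_for_batch", job_id="my-job-id")

    # Before 9.0.0 (deprecated): hook = sensor.get_hook()
    hook = sensor.hook  # cached BatchClientHook, same instance on every access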
airflow/providers/amazon/aws/sensors/dms.py

@@ -19,9 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Iterable, Sequence
 
-from deprecated import deprecated
-
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
@@ -68,11 +66,6 @@ class DmsTaskBaseSensor(AwsBaseSensor[DmsHook]):
         self.target_statuses: Iterable[str] = target_statuses or []
         self.termination_statuses: Iterable[str] = termination_statuses or []
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> DmsHook:
-        """Get DmsHook."""
-        return self.hook
-
     def poke(self, context: Context):
         if not (status := self.hook.get_task_status(self.replication_task_arn)):
             raise AirflowException(
airflow/providers/amazon/aws/sensors/dynamodb.py

@@ -18,6 +18,8 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any, Iterable, Sequence
 
+from botocore.exceptions import ClientError
+
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
@@ -102,14 +104,26 @@ class DynamoDBValueSensor(AwsBaseSensor[DynamoDBHook]):
         table = self.hook.conn.Table(self.table_name)
         self.log.info("Table: %s", table)
         self.log.info("Key: %s", key)
-        response = table.get_item(Key=key)
+
         try:
-            item_attribute_value = response["Item"][self.attribute_name]
-            self.log.info("Response: %s", response)
-            self.log.info("Want: %s = %s", self.attribute_name, self.attribute_value)
-            self.log.info("Got: {response['Item'][self.attribute_name]} = %s", item_attribute_value)
-            return item_attribute_value in (
-                [self.attribute_value] if isinstance(self.attribute_value, str) else self.attribute_value
+            response = table.get_item(Key=key)
+        except ClientError as err:
+            self.log.error(
+                "Couldn't get %s from table %s.\nError Code: %s\nError Message: %s",
+                key,
+                self.table_name,
+                err.response["Error"]["Code"],
+                err.response["Error"]["Message"],
             )
-        except KeyError:
             return False
+        else:
+            try:
+                item_attribute_value = response["Item"][self.attribute_name]
+                self.log.info("Response: %s", response)
+                self.log.info("Want: %s = %s", self.attribute_name, self.attribute_value)
+                self.log.info("Got: {response['Item'][self.attribute_name]} = %s", item_attribute_value)
+                return item_attribute_value in (
+                    [self.attribute_value] if isinstance(self.attribute_value, str) else self.attribute_value
+                )
+            except KeyError:
+                return False
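
Note: with the ClientError handling above, a missing table or permission error now logs the failure and returns False (the sensor keeps poking) instead of raising. Illustrative usage, assuming the sensor's existing signature; table, key, and attribute values are placeholders:

    from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor

    wait_for_flag = DynamoDBValueSensor(
        task_id="wait_for_flag",
        table_name="jobs",
        partition_key_name="job_id",
        partition_key_value="run-2024-01-01",
        attribute_name="status",
        attribute_value="DONE",
        poke_interval=30,
    )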
airflow/providers/amazon/aws/sensors/emr.py

@@ -21,12 +21,9 @@ from datetime import timedelta
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Iterable, Sequence
 
-from deprecated import deprecated
-
 from airflow.configuration import conf
 from airflow.exceptions import (
     AirflowException,
-    AirflowProviderDeprecationWarning,
 )
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
 from airflow.providers.amazon.aws.links.emr import EmrClusterLink, EmrLogsLink, get_log_uri
@@ -68,10 +65,6 @@ class EmrBaseSensor(BaseSensorOperator):
         self.target_states: Iterable[str] = []  # will be set in subclasses
         self.failed_states: Iterable[str] = []  # will be set in subclasses
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> EmrHook:
-        return self.hook
-
     @cached_property
     def hook(self) -> EmrHook:
         return EmrHook(aws_conn_id=self.aws_conn_id)
airflow/providers/amazon/aws/sensors/glue_catalog_partition.py

@@ -20,10 +20,8 @@ from __future__ import annotations
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated import deprecated
-
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.glue import GlueCatalogPartitionTrigger
@@ -129,8 +127,3 @@ class GlueCatalogPartitionSensor(AwsBaseSensor[GlueCatalogHook]):
         if event["status"] != "success":
             raise AirflowException(f"Trigger error: event is {event}")
         self.log.info("Partition exists in the Glue Catalog")
-
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> GlueCatalogHook:
-        """Get the GlueCatalogHook."""
-        return self.hook
airflow/providers/amazon/aws/sensors/glue_crawler.py

@@ -19,9 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Sequence
 
-from deprecated import deprecated
-
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
@@ -78,8 +76,3 @@ class GlueCrawlerSensor(AwsBaseSensor[GlueCrawlerHook]):
             raise AirflowException(f"Status: {crawler_status}")
         else:
             return False
-
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> GlueCrawlerHook:
-        """Return a new or pre-existing GlueCrawlerHook."""
-        return self.hook
airflow/providers/amazon/aws/sensors/quicksight.py

@@ -17,12 +17,9 @@
 # under the License.
 from __future__ import annotations
 
-from functools import cached_property
 from typing import TYPE_CHECKING, Sequence
 
-from deprecated import deprecated
-
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 
@@ -76,28 +73,3 @@ class QuickSightSensor(AwsBaseSensor[QuickSightHook]):
             error = self.hook.get_error_info(None, self.data_set_id, self.ingestion_id)
             raise AirflowException(f"The QuickSight Ingestion failed. Error info: {error}")
         return quicksight_ingestion_state == self.success_status
-
-    @cached_property
-    @deprecated(
-        reason=(
-            "`QuickSightSensor.quicksight_hook` property is deprecated, "
-            "please use `QuickSightSensor.hook` property instead."
-        ),
-        category=AirflowProviderDeprecationWarning,
-    )
-    def quicksight_hook(self):
-        return self.hook
-
-    @cached_property
-    @deprecated(
-        reason=(
-            "`QuickSightSensor.sts_hook` property is deprecated and will be removed in the future. "
-            "This property used for obtain AWS Account ID, "
-            "please consider to use `QuickSightSensor.hook.account_id` instead"
-        ),
-        category=AirflowProviderDeprecationWarning,
-    )
-    def sts_hook(self):
-        from airflow.providers.amazon.aws.hooks.sts import StsHook
-
-        return StsHook(aws_conn_id=self.aws_conn_id)
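
Note: code that used the removed sts_hook to look up the AWS account ID can read it from the sensor's hook, as the deprecation message suggested. A minimal sketch (the ids are placeholders):

    from airflow.providers.amazon.aws.sensors.quicksight import QuickSightSensor

    sensor = QuickSightSensor(
        task_id="wait_for_ingestion",
        data_set_id="my-data-set",
        ingestion_id="my-ingestion",
    )

    # Before 9.0.0 (deprecated): sensor.sts_hook / sensor.quicksight_hook
    account_id = sensor.hook.account_id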
airflow/providers/amazon/aws/sensors/redshift_cluster.py

@@ -20,10 +20,8 @@ from datetime import timedelta
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated import deprecated
-
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
 from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftClusterTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
@@ -98,11 +96,6 @@ class RedshiftClusterSensor(BaseSensorOperator):
         self.log.info("%s completed successfully.", self.task_id)
         self.log.info("Cluster Identifier %s is in %s state", self.cluster_identifier, self.target_status)
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> RedshiftHook:
-        """Create and return a RedshiftHook."""
-        return self.hook
-
     @cached_property
     def hook(self) -> RedshiftHook:
         return RedshiftHook(aws_conn_id=self.aws_conn_id)
airflow/providers/amazon/aws/sensors/s3.py

@@ -25,15 +25,13 @@ from datetime import datetime, timedelta
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable, Sequence, cast
 
-from deprecated import deprecated
-
 from airflow.configuration import conf
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
 from airflow.sensors.base import BaseSensorOperator, poke_mode_only
@@ -221,11 +219,6 @@ class S3KeySensor(BaseSensorOperator):
         elif event["status"] == "error":
             raise AirflowException(event["message"])
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> S3Hook:
-        """Create and return an S3Hook."""
-        return self.hook
-
     @cached_property
     def hook(self) -> S3Hook:
         return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
airflow/providers/amazon/aws/sensors/sagemaker.py

@@ -20,9 +20,7 @@ import time
 from functools import cached_property
 from typing import TYPE_CHECKING, Sequence
 
-from deprecated import deprecated
-
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
 from airflow.sensors.base import BaseSensorOperator
 
@@ -45,11 +43,6 @@ class SageMakerBaseSensor(BaseSensorOperator):
         self.aws_conn_id = aws_conn_id
         self.resource_type = resource_type  # only used for logs, to say what kind of resource we are sensing
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> SageMakerHook:
-        """Get SageMakerHook."""
-        return self.hook
-
     @cached_property
     def hook(self) -> SageMakerHook:
         return SageMakerHook(aws_conn_id=self.aws_conn_id)
@@ -238,7 +231,7 @@ class SageMakerTrainingSensor(SageMakerBaseSensor):
         return SageMakerHook.non_terminal_states
 
     def failed_states(self):
-        return SageMakerHook.failed_states
+        return SageMakerHook.training_failed_states
 
     def get_sagemaker_response(self):
         if self.print_log:
airflow/providers/amazon/aws/sensors/sqs.py

@@ -22,10 +22,8 @@ from __future__ import annotations
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any, Collection, Sequence
 
-from deprecated import deprecated
-
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
@@ -223,8 +221,3 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
             return True
         else:
             return False
-
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> SqsHook:
-        """Create and return an SqsHook."""
-        return self.hook
airflow/providers/amazon/aws/sensors/step_function.py

@@ -19,9 +19,7 @@ from __future__ import annotations
 import json
 from typing import TYPE_CHECKING, Sequence
 
-from deprecated import deprecated
-
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
@@ -84,8 +82,3 @@ class StepFunctionExecutionSensor(AwsBaseSensor[StepFunctionHook]):
         self.log.info("Doing xcom_push of output")
         self.xcom_push(context, "output", output)
         return True
-
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> StepFunctionHook:
-        """Create and return a StepFunctionHook."""
-        return self.hook