apache-airflow-providers-amazon 9.15.0__py3-none-any.whl → 9.18.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -1
  3. airflow/providers/amazon/aws/auth_manager/routes/login.py +7 -1
  4. airflow/providers/amazon/aws/bundles/s3.py +1 -1
  5. airflow/providers/amazon/aws/exceptions.py +1 -1
  6. airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py +5 -1
  7. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +1 -2
  8. airflow/providers/amazon/aws/executors/batch/batch_executor.py +1 -2
  9. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -2
  10. airflow/providers/amazon/aws/hooks/athena.py +7 -3
  11. airflow/providers/amazon/aws/hooks/athena_sql.py +3 -3
  12. airflow/providers/amazon/aws/hooks/base_aws.py +3 -7
  13. airflow/providers/amazon/aws/hooks/batch_client.py +5 -7
  14. airflow/providers/amazon/aws/hooks/batch_waiters.py +1 -2
  15. airflow/providers/amazon/aws/hooks/chime.py +2 -2
  16. airflow/providers/amazon/aws/hooks/comprehend.py +1 -1
  17. airflow/providers/amazon/aws/hooks/datasync.py +3 -3
  18. airflow/providers/amazon/aws/hooks/dynamodb.py +1 -1
  19. airflow/providers/amazon/aws/hooks/ec2.py +1 -1
  20. airflow/providers/amazon/aws/hooks/elasticache_replication_group.py +1 -1
  21. airflow/providers/amazon/aws/hooks/emr.py +1 -1
  22. airflow/providers/amazon/aws/hooks/firehose.py +56 -0
  23. airflow/providers/amazon/aws/hooks/glue.py +9 -2
  24. airflow/providers/amazon/aws/hooks/glue_catalog.py +1 -1
  25. airflow/providers/amazon/aws/hooks/kinesis.py +31 -13
  26. airflow/providers/amazon/aws/hooks/logs.py +10 -2
  27. airflow/providers/amazon/aws/hooks/mwaa.py +38 -7
  28. airflow/providers/amazon/aws/hooks/quicksight.py +1 -1
  29. airflow/providers/amazon/aws/hooks/rds.py +1 -1
  30. airflow/providers/amazon/aws/hooks/redshift_sql.py +31 -8
  31. airflow/providers/amazon/aws/hooks/s3.py +14 -6
  32. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
  33. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +1 -2
  34. airflow/providers/amazon/aws/hooks/ssm.py +34 -6
  35. airflow/providers/amazon/aws/hooks/step_function.py +1 -1
  36. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  37. airflow/providers/amazon/aws/links/emr.py +1 -1
  38. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +50 -20
  39. airflow/providers/amazon/aws/operators/appflow.py +1 -1
  40. airflow/providers/amazon/aws/operators/athena.py +1 -1
  41. airflow/providers/amazon/aws/operators/base_aws.py +2 -2
  42. airflow/providers/amazon/aws/operators/batch.py +1 -1
  43. airflow/providers/amazon/aws/operators/bedrock.py +3 -1
  44. airflow/providers/amazon/aws/operators/cloud_formation.py +2 -2
  45. airflow/providers/amazon/aws/operators/comprehend.py +1 -1
  46. airflow/providers/amazon/aws/operators/datasync.py +1 -1
  47. airflow/providers/amazon/aws/operators/dms.py +1 -1
  48. airflow/providers/amazon/aws/operators/ec2.py +1 -1
  49. airflow/providers/amazon/aws/operators/ecs.py +1 -1
  50. airflow/providers/amazon/aws/operators/eks.py +2 -1
  51. airflow/providers/amazon/aws/operators/emr.py +22 -12
  52. airflow/providers/amazon/aws/operators/eventbridge.py +1 -1
  53. airflow/providers/amazon/aws/operators/glue.py +1 -1
  54. airflow/providers/amazon/aws/operators/glue_crawler.py +1 -1
  55. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -1
  56. airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -1
  57. airflow/providers/amazon/aws/operators/lambda_function.py +1 -1
  58. airflow/providers/amazon/aws/operators/mwaa.py +13 -4
  59. airflow/providers/amazon/aws/operators/neptune.py +1 -1
  60. airflow/providers/amazon/aws/operators/rds.py +1 -1
  61. airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -1
  62. airflow/providers/amazon/aws/operators/redshift_data.py +1 -1
  63. airflow/providers/amazon/aws/operators/s3.py +1 -1
  64. airflow/providers/amazon/aws/operators/sagemaker.py +1 -1
  65. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -2
  66. airflow/providers/amazon/aws/operators/ssm.py +122 -17
  67. airflow/providers/amazon/aws/operators/step_function.py +1 -1
  68. airflow/providers/amazon/aws/secrets/secrets_manager.py +3 -4
  69. airflow/providers/amazon/aws/sensors/athena.py +1 -1
  70. airflow/providers/amazon/aws/sensors/base_aws.py +2 -2
  71. airflow/providers/amazon/aws/sensors/batch.py +1 -1
  72. airflow/providers/amazon/aws/sensors/bedrock.py +1 -1
  73. airflow/providers/amazon/aws/sensors/comprehend.py +1 -1
  74. airflow/providers/amazon/aws/sensors/dms.py +1 -1
  75. airflow/providers/amazon/aws/sensors/ec2.py +1 -1
  76. airflow/providers/amazon/aws/sensors/ecs.py +1 -1
  77. airflow/providers/amazon/aws/sensors/eks.py +2 -1
  78. airflow/providers/amazon/aws/sensors/emr.py +1 -3
  79. airflow/providers/amazon/aws/sensors/glacier.py +1 -1
  80. airflow/providers/amazon/aws/sensors/glue.py +1 -1
  81. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -1
  82. airflow/providers/amazon/aws/sensors/glue_crawler.py +1 -1
  83. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -1
  84. airflow/providers/amazon/aws/sensors/lambda_function.py +1 -1
  85. airflow/providers/amazon/aws/sensors/mwaa.py +15 -2
  86. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -1
  87. airflow/providers/amazon/aws/sensors/quicksight.py +1 -1
  88. airflow/providers/amazon/aws/sensors/rds.py +1 -1
  89. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -1
  90. airflow/providers/amazon/aws/sensors/s3.py +3 -3
  91. airflow/providers/amazon/aws/sensors/sagemaker.py +1 -1
  92. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -2
  93. airflow/providers/amazon/aws/sensors/sqs.py +1 -1
  94. airflow/providers/amazon/aws/sensors/ssm.py +33 -17
  95. airflow/providers/amazon/aws/sensors/step_function.py +1 -1
  96. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +3 -3
  97. airflow/providers/amazon/aws/transfers/base.py +5 -5
  98. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +4 -4
  99. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
  100. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
  101. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +48 -6
  102. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
  103. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +2 -5
  104. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -1
  105. airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
  106. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
  107. airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
  108. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
  109. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +6 -7
  110. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -2
  111. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
  112. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +6 -7
  113. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
  114. airflow/providers/amazon/aws/transfers/s3_to_sql.py +1 -2
  115. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
  116. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
  117. airflow/providers/amazon/aws/transfers/sql_to_s3.py +8 -9
  118. airflow/providers/amazon/aws/triggers/bedrock.py +1 -1
  119. airflow/providers/amazon/aws/triggers/ecs.py +1 -1
  120. airflow/providers/amazon/aws/triggers/eks.py +1 -1
  121. airflow/providers/amazon/aws/triggers/s3.py +29 -2
  122. airflow/providers/amazon/aws/triggers/sagemaker.py +1 -1
  123. airflow/providers/amazon/aws/triggers/sqs.py +1 -1
  124. airflow/providers/amazon/aws/triggers/ssm.py +17 -1
  125. airflow/providers/amazon/aws/utils/__init__.py +1 -1
  126. airflow/providers/amazon/aws/utils/connection_wrapper.py +3 -6
  127. airflow/providers/amazon/aws/utils/mixins.py +1 -1
  128. airflow/providers/amazon/aws/utils/waiter.py +3 -3
  129. airflow/providers/amazon/aws/utils/waiter_with_logging.py +1 -1
  130. airflow/providers/amazon/aws/waiters/emr.json +6 -6
  131. airflow/providers/amazon/get_provider_info.py +19 -1
  132. airflow/providers/amazon/version_compat.py +19 -16
  133. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/METADATA +29 -19
  134. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/RECORD +138 -136
  135. apache_airflow_providers_amazon-9.18.1rc1.dist-info/licenses/NOTICE +5 -0
  136. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/WHEEL +0 -0
  137. {apache_airflow_providers_amazon-9.15.0.dist-info → apache_airflow_providers_amazon-9.18.1rc1.dist-info}/entry_points.txt +0 -0
  138. {airflow/providers/amazon → apache_airflow_providers_amazon-9.18.1rc1.dist-info/licenses}/LICENSE +0 -0
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "9.15.0"
32
+ __version__ = "9.18.1"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.10.0"
35
+ "2.11.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-amazon:{__version__}` needs Apache Airflow 2.10.0+"
38
+ f"The package `apache-airflow-providers-amazon:{__version__}` needs Apache Airflow 2.11.0+"
39
39
  )
@@ -23,7 +23,6 @@ from pathlib import Path
23
23
  from typing import TYPE_CHECKING, TypedDict
24
24
 
25
25
  from airflow.configuration import conf
26
- from airflow.exceptions import AirflowException
27
26
  from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities, get_action_id, get_entity_type
28
27
  from airflow.providers.amazon.aws.auth_manager.constants import (
29
28
  CONF_AVP_POLICY_STORE_ID_KEY,
@@ -32,6 +31,7 @@ from airflow.providers.amazon.aws.auth_manager.constants import (
32
31
  CONF_SECTION_NAME,
33
32
  )
34
33
  from airflow.providers.amazon.aws.hooks.verified_permissions import VerifiedPermissionsHook
34
+ from airflow.providers.common.compat.sdk import AirflowException
35
35
  from airflow.utils.helpers import prune_dict
36
36
  from airflow.utils.log.logging_mixin import LoggingMixin
37
37
 
@@ -35,6 +35,7 @@ from airflow.configuration import conf
35
35
  from airflow.providers.amazon.aws.auth_manager.constants import CONF_SAML_METADATA_URL_KEY, CONF_SECTION_NAME
36
36
  from airflow.providers.amazon.aws.auth_manager.datamodels.login import LoginResponse
37
37
  from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
38
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_1_PLUS
38
39
 
39
40
  try:
40
41
  from onelogin.saml2.auth import OneLogin_Saml2_Auth
@@ -101,7 +102,12 @@ def login_callback(request: Request):
101
102
  if relay_state == "login-redirect":
102
103
  response = RedirectResponse(url=url, status_code=303)
103
104
  secure = bool(conf.get("api", "ssl_cert", fallback=""))
104
- response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure)
105
+ # In Airflow 3.1.1 authentication changes, front-end no longer handle the token
106
+ # See https://github.com/apache/airflow/pull/55506
107
+ if AIRFLOW_V_3_1_1_PLUS:
108
+ response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure, httponly=True)
109
+ else:
110
+ response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure)
105
111
  return response
106
112
  if relay_state == "login-token":
107
113
  return LoginResponse(access_token=token)
@@ -22,9 +22,9 @@ from pathlib import Path
22
22
  import structlog
23
23
 
24
24
  from airflow.dag_processing.bundles.base import BaseDagBundle
25
- from airflow.exceptions import AirflowException
26
25
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
27
26
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
27
+ from airflow.providers.common.compat.sdk import AirflowException
28
28
 
29
29
 
30
30
  class S3DagBundle(BaseDagBundle):
@@ -17,7 +17,7 @@
17
17
  # under the License.
18
18
  from __future__ import annotations
19
19
 
20
- from airflow.exceptions import AirflowException
20
+ from airflow.providers.common.compat.sdk import AirflowException
21
21
 
22
22
  # Note: Any AirflowException raised is expected to cause the TaskInstance
23
23
  # to be marked in an ERROR state
@@ -66,7 +66,11 @@ def run_and_report(command, task_key):
66
66
  try:
67
67
  log.info("Starting execution for task: %s", task_key)
68
68
  result = subprocess.run(
69
- command, shell=isinstance(command, str), stdout=subprocess.PIPE, stderr=subprocess.STDOUT
69
+ command,
70
+ check=False,
71
+ shell=isinstance(command, str),
72
+ stdout=subprocess.PIPE,
73
+ stderr=subprocess.STDOUT,
70
74
  )
71
75
  return_code = result.returncode
72
76
  log.info("Execution completed for task %s with return code %s", task_key, return_code)
@@ -26,7 +26,6 @@ from boto3.session import NoCredentialsError
26
26
  from botocore.utils import ClientError
27
27
 
28
28
  from airflow.configuration import conf
29
- from airflow.exceptions import AirflowException
30
29
  from airflow.executors.base_executor import BaseExecutor
31
30
  from airflow.models.taskinstancekey import TaskInstanceKey
32
31
  from airflow.providers.amazon.aws.executors.aws_lambda.utils import (
@@ -42,7 +41,7 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
42
41
  )
43
42
  from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
44
43
  from airflow.providers.amazon.aws.hooks.sqs import SqsHook
45
- from airflow.stats import Stats
44
+ from airflow.providers.common.compat.sdk import AirflowException, Stats
46
45
 
47
46
  try:
48
47
  from airflow.sdk import timezone
@@ -29,7 +29,6 @@ from typing import TYPE_CHECKING, Any
29
29
  from botocore.exceptions import ClientError, NoCredentialsError
30
30
 
31
31
  from airflow.configuration import conf
32
- from airflow.exceptions import AirflowException
33
32
  from airflow.executors.base_executor import BaseExecutor
34
33
  from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import (
35
34
  calculate_next_attempt_delay,
@@ -37,7 +36,7 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
37
36
  )
38
37
  from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
39
38
  from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
40
- from airflow.stats import Stats
39
+ from airflow.providers.common.compat.sdk import AirflowException, Stats
41
40
 
42
41
  try:
43
42
  from airflow.sdk import timezone
@@ -32,7 +32,6 @@ from typing import TYPE_CHECKING
32
32
 
33
33
  from botocore.exceptions import ClientError, NoCredentialsError
34
34
 
35
- from airflow.exceptions import AirflowException
36
35
  from airflow.executors.base_executor import BaseExecutor
37
36
  from airflow.providers.amazon.aws.executors.ecs.boto_schema import BotoDescribeTasksSchema, BotoRunTaskSchema
38
37
  from airflow.providers.amazon.aws.executors.ecs.utils import (
@@ -49,7 +48,7 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
49
48
  )
50
49
  from airflow.providers.amazon.aws.hooks.ecs import EcsHook
51
50
  from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
52
- from airflow.stats import Stats
51
+ from airflow.providers.common.compat.sdk import AirflowException, Stats
53
52
 
54
53
  try:
55
54
  from airflow.sdk import timezone
@@ -28,9 +28,9 @@ from __future__ import annotations
28
28
  from collections.abc import Collection
29
29
  from typing import TYPE_CHECKING, Any
30
30
 
31
- from airflow.exceptions import AirflowException
32
31
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
33
32
  from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
33
+ from airflow.providers.common.compat.sdk import AirflowException
34
34
 
35
35
  if TYPE_CHECKING:
36
36
  from botocore.paginate import PageIterator
@@ -40,11 +40,15 @@ MULTI_LINE_QUERY_LOG_PREFIX = "\n\t\t"
40
40
 
41
41
  def query_params_to_string(params: dict[str, str | Collection[str]]) -> str:
42
42
  result = ""
43
- for key, value in params.items():
43
+ for key, original_value in params.items():
44
+ value: str | Collection[str]
44
45
  if key == "QueryString":
45
46
  value = (
46
- MULTI_LINE_QUERY_LOG_PREFIX + str(value).replace("\n", MULTI_LINE_QUERY_LOG_PREFIX).rstrip()
47
+ MULTI_LINE_QUERY_LOG_PREFIX
48
+ + str(original_value).replace("\n", MULTI_LINE_QUERY_LOG_PREFIX).rstrip()
47
49
  )
50
+ else:
51
+ value = original_value
48
52
  result += f"\t{key}: {value}\n"
49
53
  return result.rstrip()
50
54
 
@@ -23,9 +23,9 @@ from typing import TYPE_CHECKING, Any
23
23
  import pyathena
24
24
  from sqlalchemy.engine.url import URL
25
25
 
26
- from airflow.exceptions import AirflowException, AirflowNotFoundException
27
26
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
28
27
  from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
28
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException
29
29
  from airflow.providers.common.sql.hooks.sql import DbApiHook
30
30
 
31
31
  if TYPE_CHECKING:
@@ -56,7 +56,7 @@ class AthenaSQLHook(AwsBaseHook, DbApiHook):
56
56
  :class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
57
57
 
58
58
  .. note::
59
- get_uri() depends on SQLAlchemy and PyAthena.
59
+ get_uri() depends on SQLAlchemy and PyAthena
60
60
  """
61
61
 
62
62
  conn_name_attr = "athena_conn_id"
@@ -163,7 +163,7 @@ class AthenaSQLHook(AwsBaseHook, DbApiHook):
163
163
  port=443,
164
164
  database=conn_params["schema_name"],
165
165
  query={"aws_session_token": creds.token, **self.conn.extra_dejson},
166
- )
166
+ ).render_as_string(hide_password=False)
167
167
 
168
168
  def get_conn(self) -> AthenaConnection:
169
169
  """Get a ``pyathena.Connection`` object."""
@@ -52,15 +52,11 @@ from dateutil.tz import tzlocal
52
52
  from slugify import slugify
53
53
 
54
54
  from airflow.configuration import conf
55
- from airflow.exceptions import (
56
- AirflowException,
57
- AirflowNotFoundException,
58
- AirflowProviderDeprecationWarning,
59
- )
55
+ from airflow.exceptions import AirflowProviderDeprecationWarning
60
56
  from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
61
57
  from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
62
58
  from airflow.providers.amazon.aws.utils.suppress import return_on_error
63
- from airflow.providers.amazon.version_compat import BaseHook
59
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException, BaseHook
64
60
  from airflow.providers_manager import ProvidersManager
65
61
  from airflow.utils.helpers import exactly_one
66
62
  from airflow.utils.log.logging_mixin import LoggingMixin
@@ -790,7 +786,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
790
786
  async def get_async_conn(self):
791
787
  """Get an aiobotocore client to use for async operations."""
792
788
  # We have to wrap the call `self.get_client_type` in another call `_get_async_conn`,
793
- # because one of it's arguments `self.region_name` is a `@property` decorated function
789
+ # because one of its arguments `self.region_name` is a `@property` decorated function
794
790
  # calling the cached property `self.conn_config` at the end.
795
791
  return await sync_to_async(self._get_async_conn)()
796
792
 
@@ -37,8 +37,8 @@ import botocore.client
37
37
  import botocore.exceptions
38
38
  import botocore.waiter
39
39
 
40
- from airflow.exceptions import AirflowException
41
40
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
41
+ from airflow.providers.common.compat.sdk import AirflowException
42
42
 
43
43
  if TYPE_CHECKING:
44
44
  from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
@@ -386,8 +386,7 @@ class BatchClientHook(AwsBaseHook):
386
386
  )
387
387
  if job_status in match_status:
388
388
  return True
389
- else:
390
- raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries")
389
+ raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries")
391
390
 
392
391
  def get_job_description(self, job_id: str) -> dict:
393
392
  """
@@ -426,10 +425,9 @@ class BatchClientHook(AwsBaseHook):
426
425
  "check Amazon Provider AWS Connection documentation for more details.",
427
426
  str(err),
428
427
  )
429
- else:
430
- raise AirflowException(
431
- f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
432
- )
428
+ raise AirflowException(
429
+ f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
430
+ )
433
431
 
434
432
  @staticmethod
435
433
  def parse_job_description(job_id: str, response: dict) -> dict:
@@ -33,12 +33,11 @@ from copy import deepcopy
33
33
  from pathlib import Path
34
34
  from typing import TYPE_CHECKING, Any
35
35
 
36
- import botocore.client
37
36
  import botocore.exceptions
38
37
  import botocore.waiter
39
38
 
40
- from airflow.exceptions import AirflowException
41
39
  from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
40
+ from airflow.providers.common.compat.sdk import AirflowException
42
41
 
43
42
  if TYPE_CHECKING:
44
43
  from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
@@ -25,7 +25,7 @@ import re
25
25
  from functools import cached_property
26
26
  from typing import Any
27
27
 
28
- from airflow.exceptions import AirflowException
28
+ from airflow.providers.common.compat.sdk import AirflowException
29
29
  from airflow.providers.http.hooks.http import HttpHook
30
30
 
31
31
 
@@ -33,7 +33,7 @@ class ChimeWebhookHook(HttpHook):
33
33
  """
34
34
  Interact with Amazon Chime Webhooks to create notifications.
35
35
 
36
- .. warning:: This hook is only designed to work with web hooks and not chat bots.
36
+ .. warning:: This hook is only designed to work with web hooks and not chatbots.
37
37
 
38
38
  :param chime_conn_id: :ref:`Amazon Chime Connection ID <howto/connection:chime>`
39
39
  with Endpoint as `https://hooks.chime.aws` and the webhook token
@@ -16,8 +16,8 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
- from airflow.exceptions import AirflowException
20
19
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
20
+ from airflow.providers.common.compat.sdk import AirflowException
21
21
 
22
22
 
23
23
  class ComprehendHook(AwsBaseHook):
@@ -21,8 +21,9 @@ from __future__ import annotations
21
21
  import time
22
22
  from urllib.parse import urlsplit
23
23
 
24
- from airflow.exceptions import AirflowBadRequest, AirflowException, AirflowTaskTimeout
24
+ from airflow.exceptions import AirflowBadRequest
25
25
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
26
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowTaskTimeout
26
27
 
27
28
 
28
29
  class DataSyncHook(AwsBaseHook):
@@ -319,5 +320,4 @@ class DataSyncHook(AwsBaseHook):
319
320
  else:
320
321
  raise AirflowException(f"Unknown status: {status}") # Should never happen
321
322
  time.sleep(self.wait_interval_seconds)
322
- else:
323
- raise AirflowTaskTimeout("Max iterations exceeded!")
323
+ raise AirflowTaskTimeout("Max iterations exceeded!")
@@ -25,8 +25,8 @@ from typing import TYPE_CHECKING
25
25
 
26
26
  from botocore.exceptions import ClientError
27
27
 
28
- from airflow.exceptions import AirflowException
29
28
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
29
+ from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
32
  from botocore.client import BaseClient
@@ -22,8 +22,8 @@ import time
22
22
  from collections.abc import Callable
23
23
  from typing import ParamSpec, TypeVar
24
24
 
25
- from airflow.exceptions import AirflowException
26
25
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
26
+ from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  PS = ParamSpec("PS")
29
29
  RT = TypeVar("RT")
@@ -19,8 +19,8 @@ from __future__ import annotations
19
19
 
20
20
  import time
21
21
 
22
- from airflow.exceptions import AirflowException
23
22
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
23
+ from airflow.providers.common.compat.sdk import AirflowException
24
24
 
25
25
 
26
26
  class ElastiCacheReplicationGroupHook(AwsBaseHook):
@@ -26,9 +26,9 @@ import tenacity
26
26
  from botocore.exceptions import ClientError
27
27
  from tenacity import retry_if_exception, stop_after_attempt, wait_fixed
28
28
 
29
- from airflow.exceptions import AirflowException, AirflowNotFoundException
30
29
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
31
30
  from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
31
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException
32
32
 
33
33
 
34
34
  class EmrHook(AwsBaseHook):
@@ -0,0 +1,56 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+ """This module contains AWS Firehose hook."""
19
+
20
+ from __future__ import annotations
21
+
22
+ from collections.abc import Iterable
23
+
24
+ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
25
+
26
+
27
+ class FirehoseHook(AwsBaseHook):
28
+ """
29
+ Interact with Amazon Kinesis Firehose.
30
+
31
+ Provide thick wrapper around :external+boto3:py:class:`boto3.client("firehose") <Firehose.Client>`.
32
+
33
+ :param delivery_stream: Name of the delivery stream
34
+
35
+ Additional arguments (such as ``aws_conn_id``) may be specified and
36
+ are passed down to the underlying AwsBaseHook.
37
+
38
+ .. seealso::
39
+ - :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
40
+ """
41
+
42
+ def __init__(self, delivery_stream: str, *args, **kwargs) -> None:
43
+ self.delivery_stream = delivery_stream
44
+ kwargs["client_type"] = "firehose"
45
+ super().__init__(*args, **kwargs)
46
+
47
+ def put_records(self, records: Iterable) -> dict:
48
+ """
49
+ Write batch records to Kinesis Firehose.
50
+
51
+ .. seealso::
52
+ - :external+boto3:py:meth:`Firehose.Client.put_record_batch`
53
+
54
+ :param records: list of records
55
+ """
56
+ return self.get_conn().put_record_batch(DeliveryStreamName=self.delivery_stream, Records=records)
@@ -33,9 +33,10 @@ from tenacity import (
33
33
  wait_exponential,
34
34
  )
35
35
 
36
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
36
+ from airflow.exceptions import AirflowProviderDeprecationWarning
37
37
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
38
38
  from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
39
+ from airflow.providers.common.compat.sdk import AirflowException
39
40
 
40
41
  DEFAULT_LOG_SUFFIX = "output"
41
42
  ERROR_LOG_SUFFIX = "error"
@@ -565,7 +566,13 @@ class GlueDataQualityHook(AwsBaseHook):
565
566
  Rule_3 ColumnLength "marketplace" between 1 and 2 FAIL {'Column.marketplace.MaximumLength': 9.0, 'Column.marketplace.MinimumLength': 3.0} Value: 9.0 does not meet the constraint requirement!
566
567
 
567
568
  """
568
- import pandas as pd
569
+ try:
570
+ import pandas as pd
571
+ except ImportError:
572
+ self.log.warning(
573
+ "Pandas is not installed. Please install pandas to see the detailed Data Quality results."
574
+ )
575
+ return
569
576
 
570
577
  pd.set_option("display.max_rows", None)
571
578
  pd.set_option("display.max_columns", None)
@@ -23,8 +23,8 @@ from typing import Any
23
23
 
24
24
  from botocore.exceptions import ClientError
25
25
 
26
- from airflow.exceptions import AirflowException
27
26
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
27
+ from airflow.providers.common.compat.sdk import AirflowException
28
28
 
29
29
 
30
30
  class GlueCatalogHook(AwsBaseHook):
@@ -19,12 +19,14 @@
19
19
 
20
20
  from __future__ import annotations
21
21
 
22
- from collections.abc import Iterable
22
+ import warnings
23
23
 
24
+ from airflow.exceptions import AirflowProviderDeprecationWarning
24
25
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
26
+ from airflow.providers.amazon.aws.hooks.firehose import FirehoseHook as _FirehoseHook
25
27
 
26
28
 
27
- class FirehoseHook(AwsBaseHook):
29
+ class FirehoseHook(_FirehoseHook):
28
30
  """
29
31
  Interact with Amazon Kinesis Firehose.
30
32
 
@@ -37,20 +39,36 @@ class FirehoseHook(AwsBaseHook):
37
39
 
38
40
  .. seealso::
39
41
  - :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
42
+ .. deprecated::
43
+ This hook was moved. Import from
44
+ :class:`airflow.providers.amazon.aws.hooks.firehose.FirehoseHook`
45
+ instead of kinesis.py
40
46
  """
41
47
 
42
- def __init__(self, delivery_stream: str, *args, **kwargs) -> None:
43
- self.delivery_stream = delivery_stream
44
- kwargs["client_type"] = "firehose"
48
+ def __init__(self, *args, **kwargs) -> None:
49
+ warnings.warn(
50
+ "Importing FirehoseHook from kinesis.py is deprecated "
51
+ "and will be removed in a future release. "
52
+ "Please import it from firehose.py instead.",
53
+ AirflowProviderDeprecationWarning,
54
+ stacklevel=2,
55
+ )
45
56
  super().__init__(*args, **kwargs)
46
57
 
47
- def put_records(self, records: Iterable):
48
- """
49
- Write batch records to Kinesis Firehose.
50
58
 
51
- .. seealso::
52
- - :external+boto3:py:meth:`Firehose.Client.put_record_batch`
59
+ class KinesisHook(AwsBaseHook):
60
+ """
61
+ Interact with Amazon Kinesis.
62
+
63
+ Provide thin wrapper around :external+boto3:py:class:`boto3.client("kinesis") <Kinesis.Client>`.
64
+
65
+ Additional arguments (such as ``aws_conn_id``) may be specified and
66
+ are passed down to the underlying AwsBaseHook.
53
67
 
54
- :param records: list of records
55
- """
56
- return self.get_conn().put_record_batch(DeliveryStreamName=self.delivery_stream, Records=records)
68
+ .. seealso::
69
+ - :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
70
+ """
71
+
72
+ def __init__(self, *args, **kwargs) -> None:
73
+ kwargs["client_type"] = "kinesis"
74
+ super().__init__(*args, **kwargs)
@@ -19,7 +19,7 @@ from __future__ import annotations
19
19
 
20
20
  import asyncio
21
21
  from collections.abc import AsyncGenerator, Generator
22
- from typing import Any
22
+ from typing import Any, TypedDict
23
23
 
24
24
  from botocore.exceptions import ClientError
25
25
 
@@ -35,6 +35,14 @@ from airflow.utils.helpers import prune_dict
35
35
  NUM_CONSECUTIVE_EMPTY_RESPONSE_EXIT_THRESHOLD = 3
36
36
 
37
37
 
38
+ class CloudWatchLogEvent(TypedDict):
39
+ """TypedDict for CloudWatch Log Event."""
40
+
41
+ timestamp: int
42
+ message: str
43
+ ingestionTime: int
44
+
45
+
38
46
  class AwsLogsHook(AwsBaseHook):
39
47
  """
40
48
  Interact with Amazon CloudWatch Logs.
@@ -67,7 +75,7 @@ class AwsLogsHook(AwsBaseHook):
67
75
  start_from_head: bool | None = None,
68
76
  continuation_token: ContinuationToken | None = None,
69
77
  end_time: int | None = None,
70
- ) -> Generator:
78
+ ) -> Generator[CloudWatchLogEvent, None, None]:
71
79
  """
72
80
  Return a generator for log items in a single stream; yields all items available at the current moment.
73
81