apache-airflow-providers-amazon 9.1.0rc4__py3-none-any.whl → 9.2.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116) hide show
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/auth_manager/avp/facade.py +2 -1
  3. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +4 -12
  4. airflow/providers/amazon/aws/executors/batch/batch_executor.py +4 -3
  5. airflow/providers/amazon/aws/executors/batch/utils.py +3 -3
  6. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +2 -1
  7. airflow/providers/amazon/aws/executors/ecs/utils.py +3 -3
  8. airflow/providers/amazon/aws/fs/s3.py +2 -2
  9. airflow/providers/amazon/aws/hooks/appflow.py +15 -5
  10. airflow/providers/amazon/aws/hooks/athena.py +2 -1
  11. airflow/providers/amazon/aws/hooks/dms.py +161 -0
  12. airflow/providers/amazon/aws/hooks/dynamodb.py +2 -1
  13. airflow/providers/amazon/aws/hooks/eks.py +2 -1
  14. airflow/providers/amazon/aws/hooks/kinesis.py +1 -1
  15. airflow/providers/amazon/aws/hooks/logs.py +2 -1
  16. airflow/providers/amazon/aws/hooks/redshift_cluster.py +4 -3
  17. airflow/providers/amazon/aws/hooks/redshift_data.py +2 -1
  18. airflow/providers/amazon/aws/hooks/redshift_sql.py +2 -6
  19. airflow/providers/amazon/aws/hooks/s3.py +7 -1
  20. airflow/providers/amazon/aws/hooks/sagemaker.py +2 -1
  21. airflow/providers/amazon/aws/hooks/ses.py +2 -1
  22. airflow/providers/amazon/aws/notifications/sns.py +1 -1
  23. airflow/providers/amazon/aws/notifications/sqs.py +1 -1
  24. airflow/providers/amazon/aws/operators/athena.py +2 -1
  25. airflow/providers/amazon/aws/operators/base_aws.py +1 -1
  26. airflow/providers/amazon/aws/operators/batch.py +2 -1
  27. airflow/providers/amazon/aws/operators/bedrock.py +2 -1
  28. airflow/providers/amazon/aws/operators/cloud_formation.py +2 -1
  29. airflow/providers/amazon/aws/operators/comprehend.py +2 -1
  30. airflow/providers/amazon/aws/operators/datasync.py +2 -1
  31. airflow/providers/amazon/aws/operators/dms.py +531 -1
  32. airflow/providers/amazon/aws/operators/ec2.py +2 -1
  33. airflow/providers/amazon/aws/operators/ecs.py +4 -1
  34. airflow/providers/amazon/aws/operators/eks.py +4 -3
  35. airflow/providers/amazon/aws/operators/emr.py +31 -8
  36. airflow/providers/amazon/aws/operators/eventbridge.py +2 -1
  37. airflow/providers/amazon/aws/operators/glacier.py +2 -1
  38. airflow/providers/amazon/aws/operators/glue.py +2 -1
  39. airflow/providers/amazon/aws/operators/glue_crawler.py +2 -1
  40. airflow/providers/amazon/aws/operators/glue_databrew.py +2 -1
  41. airflow/providers/amazon/aws/operators/kinesis_analytics.py +2 -1
  42. airflow/providers/amazon/aws/operators/lambda_function.py +2 -1
  43. airflow/providers/amazon/aws/operators/neptune.py +2 -1
  44. airflow/providers/amazon/aws/operators/quicksight.py +2 -1
  45. airflow/providers/amazon/aws/operators/rds.py +2 -1
  46. airflow/providers/amazon/aws/operators/redshift_cluster.py +2 -1
  47. airflow/providers/amazon/aws/operators/s3.py +7 -1
  48. airflow/providers/amazon/aws/operators/sagemaker.py +2 -1
  49. airflow/providers/amazon/aws/operators/sns.py +2 -1
  50. airflow/providers/amazon/aws/operators/sqs.py +2 -1
  51. airflow/providers/amazon/aws/operators/step_function.py +2 -1
  52. airflow/providers/amazon/aws/sensors/athena.py +2 -1
  53. airflow/providers/amazon/aws/sensors/base_aws.py +1 -1
  54. airflow/providers/amazon/aws/sensors/batch.py +2 -1
  55. airflow/providers/amazon/aws/sensors/bedrock.py +2 -1
  56. airflow/providers/amazon/aws/sensors/cloud_formation.py +2 -1
  57. airflow/providers/amazon/aws/sensors/comprehend.py +2 -1
  58. airflow/providers/amazon/aws/sensors/dms.py +2 -1
  59. airflow/providers/amazon/aws/sensors/dynamodb.py +2 -1
  60. airflow/providers/amazon/aws/sensors/ec2.py +2 -1
  61. airflow/providers/amazon/aws/sensors/ecs.py +2 -1
  62. airflow/providers/amazon/aws/sensors/eks.py +2 -1
  63. airflow/providers/amazon/aws/sensors/emr.py +2 -1
  64. airflow/providers/amazon/aws/sensors/glacier.py +2 -1
  65. airflow/providers/amazon/aws/sensors/glue.py +2 -1
  66. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +2 -1
  67. airflow/providers/amazon/aws/sensors/glue_crawler.py +2 -1
  68. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +2 -1
  69. airflow/providers/amazon/aws/sensors/lambda_function.py +2 -1
  70. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +2 -1
  71. airflow/providers/amazon/aws/sensors/quicksight.py +2 -1
  72. airflow/providers/amazon/aws/sensors/rds.py +2 -1
  73. airflow/providers/amazon/aws/sensors/redshift_cluster.py +2 -1
  74. airflow/providers/amazon/aws/sensors/s3.py +2 -1
  75. airflow/providers/amazon/aws/sensors/sagemaker.py +2 -1
  76. airflow/providers/amazon/aws/sensors/sqs.py +2 -1
  77. airflow/providers/amazon/aws/sensors/step_function.py +2 -1
  78. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +2 -1
  79. airflow/providers/amazon/aws/transfers/base.py +1 -1
  80. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +2 -1
  81. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +2 -1
  82. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +2 -1
  83. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +4 -3
  84. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +2 -1
  85. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +4 -8
  86. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +2 -1
  87. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +2 -1
  88. airflow/providers/amazon/aws/transfers/local_to_s3.py +2 -1
  89. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +2 -1
  90. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +2 -1
  91. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +2 -1
  92. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +2 -1
  93. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +2 -1
  94. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +2 -1
  95. airflow/providers/amazon/aws/transfers/s3_to_sql.py +2 -1
  96. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +2 -1
  97. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +14 -1
  98. airflow/providers/amazon/aws/transfers/sql_to_s3.py +2 -1
  99. airflow/providers/amazon/aws/triggers/base.py +2 -1
  100. airflow/providers/amazon/aws/triggers/dms.py +221 -0
  101. airflow/providers/amazon/aws/triggers/glue.py +2 -1
  102. airflow/providers/amazon/aws/triggers/redshift_cluster.py +2 -1
  103. airflow/providers/amazon/aws/triggers/redshift_data.py +2 -1
  104. airflow/providers/amazon/aws/triggers/s3.py +2 -1
  105. airflow/providers/amazon/aws/triggers/sagemaker.py +2 -1
  106. airflow/providers/amazon/aws/triggers/sqs.py +2 -1
  107. airflow/providers/amazon/aws/utils/__init__.py +1 -15
  108. airflow/providers/amazon/aws/utils/task_log_fetcher.py +2 -1
  109. airflow/providers/amazon/aws/utils/waiter.py +20 -0
  110. airflow/providers/amazon/aws/waiters/dms.json +88 -0
  111. airflow/providers/amazon/get_provider_info.py +9 -4
  112. airflow/providers/amazon/version_compat.py +36 -0
  113. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/METADATA +11 -17
  114. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/RECORD +116 -113
  115. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/WHEEL +0 -0
  116. {apache_airflow_providers_amazon-9.1.0rc4.dist-info → apache_airflow_providers_amazon-9.2.0rc1.dist-info}/entry_points.txt +0 -0
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "9.1.0"
32
+ __version__ = "9.2.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.8.0"
35
+ "2.9.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-amazon:{__version__}` needs Apache Airflow 2.8.0+"
38
+ f"The package `apache-airflow-providers-amazon:{__version__}` needs Apache Airflow 2.9.0+"
39
39
  )
@@ -17,9 +17,10 @@
17
17
  from __future__ import annotations
18
18
 
19
19
  import json
20
+ from collections.abc import Sequence
20
21
  from functools import cached_property
21
22
  from pathlib import Path
22
- from typing import TYPE_CHECKING, Sequence, TypedDict
23
+ from typing import TYPE_CHECKING, TypedDict
23
24
 
24
25
  from airflow.configuration import conf
25
26
  from airflow.exceptions import AirflowException
@@ -19,8 +19,9 @@ from __future__ import annotations
19
19
  import argparse
20
20
  import warnings
21
21
  from collections import defaultdict
22
+ from collections.abc import Container, Sequence
22
23
  from functools import cached_property
23
- from typing import TYPE_CHECKING, Container, Sequence, cast
24
+ from typing import TYPE_CHECKING, cast
24
25
 
25
26
  from flask import session, url_for
26
27
 
@@ -80,16 +81,6 @@ class AwsAuthManager(BaseAuthManager):
80
81
  """
81
82
 
82
83
  def __init__(self, appbuilder: AirflowAppBuilder) -> None:
83
- from packaging.version import Version
84
-
85
- from airflow.version import version
86
-
87
- # TODO: remove this if block when min_airflow_version is set to higher than 2.9.0
88
- if Version(version) < Version("2.9"):
89
- raise AirflowOptionalProviderFeatureException(
90
- "``AwsAuthManager`` is compatible with Airflow versions >= 2.9."
91
- )
92
-
93
84
  super().__init__(appbuilder)
94
85
  self._check_avp_schema_version()
95
86
 
@@ -430,7 +421,8 @@ class AwsAuthManager(BaseAuthManager):
430
421
  ]
431
422
 
432
423
  def register_views(self) -> None:
433
- self.appbuilder.add_view_no_menu(AwsAuthManagerAuthenticationViews())
424
+ if self.appbuilder:
425
+ self.appbuilder.add_view_no_menu(AwsAuthManagerAuthenticationViews())
434
426
 
435
427
  @staticmethod
436
428
  def _get_menu_item_request(resource_name: str) -> IsAuthorizedRequest:
@@ -21,9 +21,10 @@ from __future__ import annotations
21
21
 
22
22
  import time
23
23
  from collections import deque
24
+ from collections.abc import Sequence
24
25
  from contextlib import suppress
25
26
  from copy import deepcopy
26
- from typing import TYPE_CHECKING, Any, Dict, List, Sequence
27
+ from typing import TYPE_CHECKING, Any
27
28
 
28
29
  from botocore.exceptions import ClientError, NoCredentialsError
29
30
 
@@ -55,8 +56,8 @@ from airflow.providers.amazon.aws.executors.batch.utils import (
55
56
  )
56
57
  from airflow.utils.state import State
57
58
 
58
- CommandType = List[str]
59
- ExecutorConfigType = Dict[str, Any]
59
+ CommandType = list[str]
60
+ ExecutorConfigType = dict[str, Any]
60
61
 
61
62
  INVALID_CREDENTIALS_EXCEPTIONS = [
62
63
  "ExpiredTokenException",
@@ -19,7 +19,7 @@ from __future__ import annotations
19
19
  import datetime
20
20
  from collections import defaultdict
21
21
  from dataclasses import dataclass
22
- from typing import TYPE_CHECKING, Any, Dict, List
22
+ from typing import TYPE_CHECKING, Any
23
23
 
24
24
  from airflow.providers.amazon.aws.executors.utils.base_config_keys import BaseConfigKeys
25
25
  from airflow.utils.state import State
@@ -27,8 +27,8 @@ from airflow.utils.state import State
27
27
  if TYPE_CHECKING:
28
28
  from airflow.models.taskinstance import TaskInstanceKey
29
29
 
30
- CommandType = List[str]
31
- ExecutorConfigType = Dict[str, Any]
30
+ CommandType = list[str]
31
+ ExecutorConfigType = dict[str, Any]
32
32
 
33
33
  CONFIG_GROUP_NAME = "aws_batch_executor"
34
34
 
@@ -25,9 +25,10 @@ from __future__ import annotations
25
25
 
26
26
  import time
27
27
  from collections import defaultdict, deque
28
+ from collections.abc import Sequence
28
29
  from contextlib import suppress
29
30
  from copy import deepcopy
30
- from typing import TYPE_CHECKING, Sequence
31
+ from typing import TYPE_CHECKING
31
32
 
32
33
  from botocore.exceptions import ClientError, NoCredentialsError
33
34
 
@@ -26,7 +26,7 @@ from __future__ import annotations
26
26
  import datetime
27
27
  from collections import defaultdict
28
28
  from dataclasses import dataclass
29
- from typing import TYPE_CHECKING, Any, Callable, Dict, List
29
+ from typing import TYPE_CHECKING, Any, Callable
30
30
 
31
31
  from inflection import camelize
32
32
 
@@ -36,9 +36,9 @@ from airflow.utils.state import State
36
36
  if TYPE_CHECKING:
37
37
  from airflow.models.taskinstance import TaskInstanceKey
38
38
 
39
- CommandType = List[str]
39
+ CommandType = list[str]
40
40
  ExecutorConfigFunctionType = Callable[[CommandType], dict]
41
- ExecutorConfigType = Dict[str, Any]
41
+ ExecutorConfigType = dict[str, Any]
42
42
 
43
43
  ECS_LAUNCH_TYPE_EC2 = "EC2"
44
44
  ECS_LAUNCH_TYPE_FARGATE = "FARGATE"
@@ -19,7 +19,7 @@ from __future__ import annotations
19
19
  import asyncio
20
20
  import logging
21
21
  from functools import partial
22
- from typing import TYPE_CHECKING, Any, Callable, Dict
22
+ from typing import TYPE_CHECKING, Any, Callable
23
23
 
24
24
  import requests
25
25
  from botocore import UNSIGNED
@@ -32,7 +32,7 @@ if TYPE_CHECKING:
32
32
  from fsspec import AbstractFileSystem
33
33
 
34
34
 
35
- Properties = Dict[str, str]
35
+ Properties = dict[str, str]
36
36
 
37
37
  S3_PROXY_URI = "proxy-uri"
38
38
 
@@ -16,7 +16,15 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
- from typing import TYPE_CHECKING
19
+ from collections.abc import Sequence
20
+ from typing import TYPE_CHECKING, cast
21
+
22
+ from mypy_boto3_appflow.type_defs import (
23
+ DestinationFlowConfigTypeDef,
24
+ SourceFlowConfigTypeDef,
25
+ TaskTypeDef,
26
+ TriggerConfigTypeDef,
27
+ )
20
28
 
21
29
  from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
22
30
  from airflow.providers.amazon.aws.utils.waiter_with_logging import wait
@@ -117,9 +125,11 @@ class AppflowHook(AwsGenericHook["AppflowClient"]):
117
125
 
118
126
  self.conn.update_flow(
119
127
  flowName=response["flowName"],
120
- destinationFlowConfigList=response["destinationFlowConfigList"],
121
- sourceFlowConfig=response["sourceFlowConfig"],
122
- triggerConfig=response["triggerConfig"],
128
+ destinationFlowConfigList=cast(
129
+ Sequence[DestinationFlowConfigTypeDef], response["destinationFlowConfigList"]
130
+ ),
131
+ sourceFlowConfig=cast(SourceFlowConfigTypeDef, response["sourceFlowConfig"]),
132
+ triggerConfig=cast(TriggerConfigTypeDef, response["triggerConfig"]),
123
133
  description=response.get("description", "Flow description."),
124
- tasks=tasks,
134
+ tasks=cast(Sequence[TaskTypeDef], tasks),
125
135
  )
@@ -25,7 +25,8 @@ This module contains AWS Athena hook.
25
25
 
26
26
  from __future__ import annotations
27
27
 
28
- from typing import TYPE_CHECKING, Any, Collection
28
+ from collections.abc import Collection
29
+ from typing import TYPE_CHECKING, Any
29
30
 
30
31
  from airflow.exceptions import AirflowException
31
32
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -18,7 +18,12 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  import json
21
+ from datetime import datetime
21
22
  from enum import Enum
23
+ from typing import Any
24
+
25
+ from botocore.exceptions import ClientError
26
+ from dateutil import parser
22
27
 
23
28
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
24
29
 
@@ -219,3 +224,159 @@ class DmsHook(AwsBaseHook):
219
224
  ],
220
225
  WithoutSettings=True,
221
226
  )
227
+
228
+ def describe_replication_configs(self, filters: list[dict] | None = None, **kwargs) -> list[dict]:
229
+ """
230
+ Return list of serverless replication configs.
231
+
232
+ .. seealso::
233
+ - :external+boto3:py:meth:`DatabaseMigrationService.Client.describe_replication_configs`
234
+
235
+ :param filters: List of filter objects
236
+ :return: List of replication configs
237
+ """
238
+ filters = filters if filters is not None else []
239
+
240
+ try:
241
+ resp = self.conn.describe_replication_configs(Filters=filters, **kwargs)
242
+ return resp.get("ReplicationConfigs", [])
243
+ except Exception as ex:
244
+ self.log.error("Error while describing replication configs: %s", str(ex))
245
+ raise ex
246
+
247
+ def create_replication_config(
248
+ self,
249
+ replication_config_id: str,
250
+ source_endpoint_arn: str,
251
+ target_endpoint_arn: str,
252
+ compute_config: dict[str, Any],
253
+ replication_type: str,
254
+ table_mappings: str,
255
+ additional_config_kwargs: dict[str, Any] | None = None,
256
+ **kwargs,
257
+ ):
258
+ """
259
+ Create an AWS DMS Serverless configuration that can be used to start a DMS Serverless replication.
260
+
261
+ .. seealso::
262
+ - :external+boto3:py:meth:`DatabaseMigrationService.Client.create_replication_config`
263
+
264
+ :param replication_config_id: Unique identifier used to create a ReplicationConfigArn.
265
+ :param source_endpoint_arn: ARN of the source endpoint
266
+ :param target_endpoint_arn: ARN of the target endpoint
267
+ :param compute_config: Parameters for provisioning a DMS Serverless replication.
268
+ :param replication_type: type of DMS Serverless replication
269
+ :param table_mappings: JSON table mappings
270
+ :param additional_config_kwargs: Additional boto3 arguments passed through to
271
+ ``create_replication_config``, e.g. ``Tags`` (key-value tag pairs),
272
+ ``ResourceIdentifier`` (unique value or name that you set for a given resource that can be used to construct an Amazon Resource Name (ARN) for that resource),
273
+ ``SupplementalSettings`` (JSON settings for specifying supplemental data) and ``ReplicationSettings`` (JSON settings for DMS Serverless replications)
274
+
275
+ :return: ReplicationConfigArn
276
+
277
+ """
278
+ if additional_config_kwargs is None:
279
+ additional_config_kwargs = {}
280
+ try:
281
+ resp = self.conn.create_replication_config(
282
+ ReplicationConfigIdentifier=replication_config_id,
283
+ SourceEndpointArn=source_endpoint_arn,
284
+ TargetEndpointArn=target_endpoint_arn,
285
+ ComputeConfig=compute_config,
286
+ ReplicationType=replication_type,
287
+ TableMappings=table_mappings,
288
+ **additional_config_kwargs,
289
+ )
290
+ arn = resp.get("ReplicationConfig", {}).get("ReplicationConfigArn")
291
+ self.log.info("Successfully created replication config: %s", arn)
292
+ return arn
293
+
294
+ except ClientError as err:
295
+ err_str = f"Error: {err.get('Error','').get('Code','')}: {err.get('Error','').get('Message','')}"
296
+ self.log.error("Error while creating replication config: %s", err_str)
297
+ raise err
298
+
299
+ def describe_replications(self, filters: list[dict[str, Any]] | None = None, **kwargs) -> list[dict]:
300
+ """
301
+ Return list of serverless replications.
302
+
303
+ .. seealso::
304
+ - :external+boto3:py:meth:`DatabaseMigrationService.Client.describe_replications`
305
+
306
+ :param filters: List of filter objects
307
+ :return: List of replications
308
+ """
309
+ filters = filters if filters is not None else []
310
+ try:
311
+ resp = self.conn.describe_replications(Filters=filters, **kwargs)
312
+ return resp.get("Replications", [])
313
+ except Exception as ex:
314
+ self.log.error("Error while describing replications: %s", str(ex))
315
+ raise ex
316
+
317
+ def delete_replication_config(
318
+ self, replication_config_arn: str, delay: int = 60, max_attempts: int = 120
319
+ ):
320
+ """
321
+ Delete an AWS DMS Serverless configuration.
322
+
323
+ .. seealso::
324
+ - :external+boto3:py:meth:`DatabaseMigrationService.Client.delete_replication_config`
325
+
326
+ :param replication_config_arn: ReplicationConfigArn
327
+ """
328
+ try:
329
+ self.log.info("Deleting replication config: %s", replication_config_arn)
330
+
331
+ self.conn.delete_replication_config(ReplicationConfigArn=replication_config_arn)
332
+
333
+ except ClientError as err:
334
+ err_str = (
335
+ f"Error: {err.get('Error', '').get('Code', '')}: {err.get('Error', '').get('Message', '')}"
336
+ )
337
+ self.log.error("Error while deleting replication config: %s", err_str)
338
+ raise err
339
+
340
+ def start_replication(
341
+ self,
342
+ replication_config_arn: str,
343
+ start_replication_type: str,
344
+ cdc_start_time: datetime | str | None = None,
345
+ cdc_start_pos: str | None = None,
346
+ cdc_stop_pos: str | None = None,
347
+ ):
348
+ additional_args: dict[str, Any] = {}
349
+
350
+ if cdc_start_time:
351
+ additional_args["CdcStartTime"] = (
352
+ cdc_start_time if isinstance(cdc_start_time, datetime) else parser.parse(cdc_start_time)
353
+ )
354
+ if cdc_start_pos:
355
+ additional_args["CdcStartPosition"] = cdc_start_pos
356
+ if cdc_stop_pos:
357
+ additional_args["CdcStopPosition"] = cdc_stop_pos
358
+
359
+ try:
360
+ resp = self.conn.start_replication(
361
+ ReplicationConfigArn=replication_config_arn,
362
+ StartReplicationType=start_replication_type,
363
+ **additional_args,
364
+ )
365
+
366
+ return resp
367
+ except Exception as ex:
368
+ self.log.error("Error while starting replication: %s", str(ex))
369
+ raise ex
370
+
371
+ def stop_replication(self, replication_config_arn: str):
372
+ resp = self.conn.stop_replication(ReplicationConfigArn=replication_config_arn)
373
+ return resp
374
+
375
+ def get_provision_status(self, replication_config_arn: str) -> str:
376
+ """Get the provisioning status for a serverless replication."""
377
+ result = self.describe_replications(
378
+ filters=[{"Name": "replication-config-arn", "Values": [replication_config_arn]}]
379
+ )
380
+
381
+ provision_status = result[0].get("ProvisionData", {}).get("ProvisionState", "")
382
+ return provision_status
@@ -19,8 +19,9 @@
19
19
 
20
20
  from __future__ import annotations
21
21
 
22
+ from collections.abc import Iterable
22
23
  from functools import cached_property
23
- from typing import TYPE_CHECKING, Iterable
24
+ from typing import TYPE_CHECKING
24
25
 
25
26
  from botocore.exceptions import ClientError
26
27
 
@@ -22,10 +22,11 @@ import base64
22
22
  import json
23
23
  import sys
24
24
  import tempfile
25
+ from collections.abc import Generator
25
26
  from contextlib import contextmanager
26
27
  from enum import Enum
27
28
  from functools import partial
28
- from typing import Callable, Generator
29
+ from typing import Callable
29
30
 
30
31
  from botocore.exceptions import ClientError
31
32
  from botocore.signers import RequestSigner
@@ -19,7 +19,7 @@
19
19
 
20
20
  from __future__ import annotations
21
21
 
22
- from typing import Iterable
22
+ from collections.abc import Iterable
23
23
 
24
24
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
25
25
 
@@ -18,7 +18,8 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  import asyncio
21
- from typing import Any, AsyncGenerator, Generator
21
+ from collections.abc import AsyncGenerator, Generator
22
+ from typing import Any
22
23
 
23
24
  from botocore.exceptions import ClientError
24
25
 
@@ -16,7 +16,8 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
- from typing import Any, Sequence
19
+ from collections.abc import Sequence
20
+ from typing import Any
20
21
 
21
22
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
22
23
 
@@ -93,8 +94,8 @@ class RedshiftHook(AwsBaseHook):
93
94
 
94
95
  async def cluster_status_async(self, cluster_identifier: str) -> str:
95
96
  async with self.async_conn as client:
96
- response = await client.describe_clusters(ClusterIdentifier=cluster_identifier)["Clusters"]
97
- return response[0]["ClusterStatus"] if response else None
97
+ response = await client.describe_clusters(ClusterIdentifier=cluster_identifier)
98
+ return response["Clusters"][0]["ClusterStatus"] if response else None
98
99
 
99
100
  def delete_cluster(
100
101
  self,
@@ -18,9 +18,10 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  import time
21
+ from collections.abc import Iterable
21
22
  from dataclasses import dataclass
22
23
  from pprint import pformat
23
- from typing import TYPE_CHECKING, Any, Iterable
24
+ from typing import TYPE_CHECKING, Any
24
25
  from uuid import UUID
25
26
 
26
27
  from pendulum import duration
@@ -20,19 +20,15 @@ from functools import cached_property
20
20
  from typing import TYPE_CHECKING
21
21
 
22
22
  import redshift_connector
23
- from packaging.version import Version
24
23
  from redshift_connector import Connection as RedshiftConnection
25
24
  from sqlalchemy import create_engine
26
25
  from sqlalchemy.engine.url import URL
27
26
 
28
- from airflow import __version__ as AIRFLOW_VERSION
29
27
  from airflow.exceptions import AirflowException
30
28
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
29
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_2_10_PLUS
31
30
  from airflow.providers.common.sql.hooks.sql import DbApiHook
32
31
 
33
- _IS_AIRFLOW_2_10_OR_HIGHER = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.10.0")
34
-
35
-
36
32
  if TYPE_CHECKING:
37
33
  from airflow.models.connection import Connection
38
34
  from airflow.providers.openlineage.sqlparser import DatabaseInfo
@@ -265,6 +261,6 @@ class RedshiftSQLHook(DbApiHook):
265
261
 
266
262
  def get_openlineage_default_schema(self) -> str | None:
267
263
  """Return current schema. This is usually changed with ``SEARCH_PATH`` parameter."""
268
- if _IS_AIRFLOW_2_10_OR_HIGHER:
264
+ if AIRFLOW_V_2_10_PLUS:
269
265
  return self.get_first("SELECT CURRENT_SCHEMA();")[0]
270
266
  return super().get_openlineage_default_schema()
@@ -28,6 +28,7 @@ import os
28
28
  import re
29
29
  import shutil
30
30
  import time
31
+ from collections.abc import AsyncIterator
31
32
  from contextlib import suppress
32
33
  from copy import deepcopy
33
34
  from datetime import datetime
@@ -36,7 +37,7 @@ from inspect import signature
36
37
  from io import BytesIO
37
38
  from pathlib import Path
38
39
  from tempfile import NamedTemporaryFile, gettempdir
39
- from typing import TYPE_CHECKING, Any, AsyncIterator, Callable
40
+ from typing import TYPE_CHECKING, Any, Callable
40
41
  from urllib.parse import urlsplit
41
42
  from uuid import uuid4
42
43
 
@@ -1297,6 +1298,7 @@ class S3Hook(AwsBaseHook):
1297
1298
  dest_bucket_name: str | None = None,
1298
1299
  source_version_id: str | None = None,
1299
1300
  acl_policy: str | None = None,
1301
+ meta_data_directive: str | None = None,
1300
1302
  **kwargs,
1301
1303
  ) -> None:
1302
1304
  """
@@ -1326,10 +1328,14 @@ class S3Hook(AwsBaseHook):
1326
1328
  :param source_version_id: Version ID of the source object (OPTIONAL)
1327
1329
  :param acl_policy: The string to specify the canned ACL policy for the
1328
1330
  object to be copied which is private by default.
1331
+ :param meta_data_directive: Whether to `COPY` the metadata from the source object or `REPLACE` it
1332
+ with metadata that's provided in the request.
1329
1333
  """
1330
1334
  acl_policy = acl_policy or "private"
1331
1335
  if acl_policy != NO_ACL:
1332
1336
  kwargs["ACL"] = acl_policy
1337
+ if meta_data_directive:
1338
+ kwargs["MetadataDirective"] = meta_data_directive
1333
1339
 
1334
1340
  dest_bucket_name, dest_bucket_key = self.get_s3_bucket_key(
1335
1341
  dest_bucket_name, dest_bucket_key, "dest_bucket_name", "dest_bucket_key"
@@ -23,9 +23,10 @@ import tarfile
23
23
  import tempfile
24
24
  import time
25
25
  from collections import Counter, namedtuple
26
+ from collections.abc import AsyncGenerator, Generator
26
27
  from datetime import datetime
27
28
  from functools import partial
28
- from typing import Any, AsyncGenerator, Callable, Generator, cast
29
+ from typing import Any, Callable, cast
29
30
 
30
31
  from asgiref.sync import sync_to_async
31
32
  from botocore.exceptions import ClientError
@@ -18,7 +18,8 @@
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
- from typing import Any, Iterable
21
+ from collections.abc import Iterable
22
+ from typing import Any
22
23
 
23
24
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
24
25
  from airflow.utils.email import build_mime_message
@@ -17,8 +17,8 @@
17
17
 
18
18
  from __future__ import annotations
19
19
 
20
+ from collections.abc import Sequence
20
21
  from functools import cached_property
21
- from typing import Sequence
22
22
 
23
23
  from airflow.notifications.basenotifier import BaseNotifier
24
24
  from airflow.providers.amazon.aws.hooks.sns import SnsHook
@@ -17,8 +17,8 @@
17
17
 
18
18
  from __future__ import annotations
19
19
 
20
+ from collections.abc import Sequence
20
21
  from functools import cached_property
21
- from typing import Sequence
22
22
 
23
23
  from airflow.notifications.basenotifier import BaseNotifier
24
24
  from airflow.providers.amazon.aws.hooks.sqs import SqsHook
@@ -17,7 +17,8 @@
17
17
  # under the License.
18
18
  from __future__ import annotations
19
19
 
20
- from typing import TYPE_CHECKING, Any, Sequence
20
+ from collections.abc import Sequence
21
+ from typing import TYPE_CHECKING, Any
21
22
  from urllib.parse import urlparse
22
23
 
23
24
  from airflow.configuration import conf
@@ -17,7 +17,7 @@
17
17
 
18
18
  from __future__ import annotations
19
19
 
20
- from typing import Sequence
20
+ from collections.abc import Sequence
21
21
 
22
22
  from airflow.models import BaseOperator
23
23
  from airflow.providers.amazon.aws.utils.mixins import (
@@ -26,9 +26,10 @@ AWS Batch services.
26
26
 
27
27
  from __future__ import annotations
28
28
 
29
+ from collections.abc import Sequence
29
30
  from datetime import timedelta
30
31
  from functools import cached_property
31
- from typing import TYPE_CHECKING, Any, Sequence
32
+ from typing import TYPE_CHECKING, Any
32
33
 
33
34
  from airflow.configuration import conf
34
35
  from airflow.exceptions import AirflowException
@@ -17,8 +17,9 @@
17
17
  from __future__ import annotations
18
18
 
19
19
  import json
20
+ from collections.abc import Sequence
20
21
  from time import sleep
21
- from typing import TYPE_CHECKING, Any, Sequence
22
+ from typing import TYPE_CHECKING, Any
22
23
 
23
24
  from botocore.exceptions import ClientError
24
25
 
@@ -19,7 +19,8 @@
19
19
 
20
20
  from __future__ import annotations
21
21
 
22
- from typing import TYPE_CHECKING, Sequence
22
+ from collections.abc import Sequence
23
+ from typing import TYPE_CHECKING
23
24
 
24
25
  from airflow.providers.amazon.aws.hooks.cloud_formation import CloudFormationHook
25
26
  from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
@@ -16,8 +16,9 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
+ from collections.abc import Sequence
19
20
  from functools import cached_property
20
- from typing import TYPE_CHECKING, Any, ClassVar, Sequence
21
+ from typing import TYPE_CHECKING, Any, ClassVar
21
22
 
22
23
  from airflow.configuration import conf
23
24
  from airflow.exceptions import AirflowException
@@ -20,7 +20,8 @@ from __future__ import annotations
20
20
 
21
21
  import logging
22
22
  import random
23
- from typing import TYPE_CHECKING, Any, Sequence
23
+ from collections.abc import Sequence
24
+ from typing import TYPE_CHECKING, Any
24
25
 
25
26
  from airflow.exceptions import AirflowException, AirflowTaskTimeout
26
27
  from airflow.providers.amazon.aws.hooks.datasync import DataSyncHook