apache-airflow-providers-amazon 8.28.0rc1__py3-none-any.whl → 8.29.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +4 -0
  3. airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py +5 -1
  4. airflow/providers/amazon/aws/executors/batch/batch_executor.py +1 -1
  5. airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py +13 -4
  6. airflow/providers/amazon/aws/executors/ecs/utils.py +8 -2
  7. airflow/providers/amazon/aws/hooks/s3.py +1 -1
  8. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +2 -3
  9. airflow/providers/amazon/aws/log/s3_task_handler.py +2 -4
  10. airflow/providers/amazon/aws/operators/ecs.py +9 -0
  11. airflow/providers/amazon/aws/operators/glue_databrew.py +53 -16
  12. airflow/providers/amazon/aws/sensors/batch.py +1 -3
  13. airflow/providers/amazon/aws/sensors/ecs.py +4 -4
  14. airflow/providers/amazon/aws/sensors/glue.py +1 -5
  15. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +13 -7
  16. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +65 -11
  17. airflow/providers/amazon/aws/triggers/glue_databrew.py +35 -8
  18. airflow/providers/amazon/aws/utils/openlineage.py +136 -0
  19. airflow/providers/amazon/aws/utils/waiter_with_logging.py +26 -6
  20. airflow/providers/amazon/aws/waiters/stepfunctions.json +1 -1
  21. airflow/providers/amazon/get_provider_info.py +4 -3
  22. {apache_airflow_providers_amazon-8.28.0rc1.dist-info → apache_airflow_providers_amazon-8.29.0.dist-info}/METADATA +17 -17
  23. {apache_airflow_providers_amazon-8.28.0rc1.dist-info → apache_airflow_providers_amazon-8.29.0.dist-info}/RECORD +25 -24
  24. {apache_airflow_providers_amazon-8.28.0rc1.dist-info → apache_airflow_providers_amazon-8.29.0.dist-info}/WHEEL +0 -0
  25. {apache_airflow_providers_amazon-8.28.0rc1.dist-info → apache_airflow_providers_amazon-8.29.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "8.28.0"
+__version__ = "8.29.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.8.0"
airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -36,6 +36,7 @@ from airflow.providers.amazon.aws.auth_manager.cli.definition import (
 from airflow.providers.amazon.aws.auth_manager.security_manager.aws_security_manager_override import (
     AwsSecurityManagerOverride,
 )
+from airflow.providers.amazon.aws.auth_manager.views.auth import AwsAuthManagerAuthenticationViews
 
 try:
     from airflow.auth.managers.base_auth_manager import BaseAuthManager, ResourceMethod
@@ -423,6 +424,9 @@ class AwsAuthManager(BaseAuthManager):
             ),
         ]
 
+    def register_views(self) -> None:
+        self.appbuilder.add_view_no_menu(AwsAuthManagerAuthenticationViews())
+
     @staticmethod
     def _get_menu_item_request(resource_name: str) -> IsAuthorizedRequest:
         return {
airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py
@@ -27,7 +27,11 @@ except ImportError:
 
 
 class AwsSecurityManagerOverride(AirflowSecurityManagerV2):
-    """The security manager override specific to AWS auth manager."""
+    """
+    The security manager override specific to AWS auth manager.
+
+    This class is only used in Airflow 2. This can be safely be removed when min Airflow version >= 3
+    """
 
     def register_views(self):
         """Register views specific to AWS auth manager."""
airflow/providers/amazon/aws/executors/batch/batch_executor.py
@@ -448,7 +448,7 @@ class AwsBatchExecutor(BaseExecutor):
                 airflow_cmd=ti.command_as_list(),
                 queue=ti.queue,
                 exec_config=ti.executor_config,
-                attempt_number=ti.prev_attempted_tries,
+                attempt_number=ti.try_number,
             )
             adopted_tis.append(ti)
 
airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py
@@ -35,6 +35,8 @@ from json import JSONDecodeError
 from airflow.configuration import conf
 from airflow.providers.amazon.aws.executors.ecs.utils import (
     CONFIG_GROUP_NAME,
+    ECS_LAUNCH_TYPE_EC2,
+    ECS_LAUNCH_TYPE_FARGATE,
     AllEcsConfigKeys,
     RunTaskKwargsConfigKeys,
     camelize_dict_keys,
@@ -56,13 +58,15 @@ def _fetch_config_values() -> dict[str, str]:
 
 
 def build_task_kwargs() -> dict:
+    all_config_keys = AllEcsConfigKeys()
     # This will put some kwargs at the root of the dictionary that do NOT belong there. However,
     # the code below expects them to be there and will rearrange them as necessary.
     task_kwargs = _fetch_config_values()
     task_kwargs.update(_fetch_templated_kwargs())
 
-    has_launch_type: bool = "launch_type" in task_kwargs
-    has_capacity_provider: bool = "capacity_provider_strategy" in task_kwargs
+    has_launch_type: bool = all_config_keys.LAUNCH_TYPE in task_kwargs
+    has_capacity_provider: bool = all_config_keys.CAPACITY_PROVIDER_STRATEGY in task_kwargs
+    is_launch_type_ec2: bool = task_kwargs.get(all_config_keys.LAUNCH_TYPE, None) == ECS_LAUNCH_TYPE_EC2
 
     if has_capacity_provider and has_launch_type:
         raise ValueError(
@@ -75,7 +79,12 @@ def build_task_kwargs() -> dict:
         # the final fallback.
         cluster = EcsHook().conn.describe_clusters(clusters=[task_kwargs["cluster"]])["clusters"][0]
         if not cluster.get("defaultCapacityProviderStrategy"):
-            task_kwargs["launch_type"] = "FARGATE"
+            task_kwargs[all_config_keys.LAUNCH_TYPE] = ECS_LAUNCH_TYPE_FARGATE
+
+    # If you're using the EC2 launch type, you should not/can not provide the platform_version. In this
+    # case we'll drop it on the floor on behalf of the user, instead of throwing an exception.
+    if is_launch_type_ec2:
+        task_kwargs.pop(all_config_keys.PLATFORM_VERSION, None)
 
     # There can only be 1 count of these containers
     task_kwargs["count"] = 1  # type: ignore
@@ -105,7 +114,7 @@ def build_task_kwargs() -> dict:
                 "awsvpcConfiguration": {
                     "subnets": str(subnets).split(",") if subnets else None,
                     "securityGroups": str(security_groups).split(",") if security_groups else None,
-                    "assignPublicIp": parse_assign_public_ip(assign_public_ip),
+                    "assignPublicIp": parse_assign_public_ip(assign_public_ip, is_launch_type_ec2),
                 }
             }
         )
airflow/providers/amazon/aws/executors/ecs/utils.py
@@ -40,6 +40,9 @@ CommandType = List[str]
 ExecutorConfigFunctionType = Callable[[CommandType], dict]
 ExecutorConfigType = Dict[str, Any]
 
+ECS_LAUNCH_TYPE_EC2 = "EC2"
+ECS_LAUNCH_TYPE_FARGATE = "FARGATE"
+
 CONFIG_GROUP_NAME = "aws_ecs_executor"
 
 CONFIG_DEFAULTS = {
@@ -247,9 +250,12 @@ def _recursive_flatten_dict(nested_dict):
     return dict(items)
 
 
-def parse_assign_public_ip(assign_public_ip):
+def parse_assign_public_ip(assign_public_ip, is_launch_type_ec2=False):
     """Convert "assign_public_ip" from True/False to ENABLE/DISABLE."""
-    return "ENABLED" if assign_public_ip == "True" else "DISABLED"
+    # If the launch type is EC2, you cannot/should not provide the assignPublicIp parameter (which is
+    # specific to Fargate)
+    if not is_launch_type_ec2:
+        return "ENABLED" if assign_public_ip == "True" else "DISABLED"
 
 
 def camelize_dict_keys(nested_dict) -> dict:
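
Note (not part of the diff): a minimal sketch of the changed helper's behaviour, derived from the code above; values are illustrative.

```python
from airflow.providers.amazon.aws.executors.ecs.utils import parse_assign_public_ip

# Fargate (default): the string flag is converted exactly as before.
assert parse_assign_public_ip("True") == "ENABLED"
assert parse_assign_public_ip("False") == "DISABLED"

# EC2 launch type: the function now returns None, so the Fargate-only
# assignPublicIp value is no longer forced into the awsvpcConfiguration block.
assert parse_assign_public_ip("True", is_launch_type_ec2=True) is None
```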
airflow/providers/amazon/aws/hooks/s3.py
@@ -238,7 +238,7 @@ class S3Hook(AwsBaseHook):
         valid_s3_virtual_hosted_format = "https://bucket-name.s3.region-code.amazonaws.com/key-name"
         format = s3url.split("//")
         if re.match(r"s3[na]?:", format[0], re.IGNORECASE):
-            parsed_url = urlsplit(s3url)
+            parsed_url = urlsplit(s3url, allow_fragments=False)
             if not parsed_url.netloc:
                 raise S3HookUriParseFailure(
                     "Please provide a bucket name using a valid format of the form: "
airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -71,13 +71,12 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
     :param base_log_folder: base folder to store logs locally
     :param log_group_arn: ARN of the Cloudwatch log group for remote log storage
         with format ``arn:aws:logs:{region name}:{account id}:log-group:{group name}``
-    :param filename_template: template for file name (local storage) or log stream name (remote)
     """
 
     trigger_should_wrap = True
 
-    def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: str | None = None):
-        super().__init__(base_log_folder, filename_template)
+    def __init__(self, base_log_folder: str, log_group_arn: str, **kwargs):
+        super().__init__(base_log_folder)
         split_arn = log_group_arn.split(":")
 
         self.handler = None
airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -42,10 +42,8 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
 
     trigger_should_wrap = True
 
-    def __init__(
-        self, base_log_folder: str, s3_log_folder: str, filename_template: str | None = None, **kwargs
-    ):
-        super().__init__(base_log_folder, filename_template)
+    def __init__(self, base_log_folder: str, s3_log_folder: str, **kwargs):
+        super().__init__(base_log_folder)
         self.handler: logging.FileHandler | None = None
         self.remote_base = s3_log_folder
         self.log_relative_path = ""
airflow/providers/amazon/aws/operators/ecs.py
@@ -373,6 +373,10 @@ class EcsRunTaskOperator(EcsBaseOperator):
         When capacity_provider_strategy is specified, the launch_type parameter is omitted.
         If no capacity_provider_strategy or launch_type is specified,
         the default capacity provider strategy for the cluster is used.
+    :param volume_configurations: the volume configurations to use when using capacity provider. The name of the volume must match
+        the name from the task definition.
+        You can configure the settings like size, volume type, IOPS, throughput and others mentioned in
+        (https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_TaskManagedEBSVolumeConfiguration.html)
     :param group: the name of the task group associated with the task
     :param placement_constraints: an array of placement constraint objects to use for
         the task
@@ -420,6 +424,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
         "overrides",
         "launch_type",
         "capacity_provider_strategy",
+        "volume_configurations",
         "group",
         "placement_constraints",
         "placement_strategy",
@@ -450,6 +455,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
         overrides: dict,
         launch_type: str = "EC2",
         capacity_provider_strategy: list | None = None,
+        volume_configurations: list | None = None,
         group: str | None = None,
         placement_constraints: list | None = None,
         placement_strategy: list | None = None,
@@ -479,6 +485,7 @@ class EcsRunTaskOperator(EcsBaseOperator):
         self.overrides = overrides
         self.launch_type = launch_type
         self.capacity_provider_strategy = capacity_provider_strategy
+        self.volume_configurations = volume_configurations
         self.group = group
         self.placement_constraints = placement_constraints
         self.placement_strategy = placement_strategy
@@ -614,6 +621,8 @@ class EcsRunTaskOperator(EcsBaseOperator):
 
         if self.capacity_provider_strategy:
             run_opts["capacityProviderStrategy"] = self.capacity_provider_strategy
+            if self.volume_configurations is not None:
+                run_opts["volumeConfigurations"] = self.volume_configurations
         elif self.launch_type:
             run_opts["launchType"] = self.launch_type
             if self.platform_version is not None:
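
Note (not part of the diff): a hedged usage sketch of the new `volume_configurations` parameter. Cluster, task definition, role ARN and volume settings are illustrative assumptions and must match your own ECS setup; the volume dict is assumed to follow the TaskManagedEBSVolumeConfiguration shape referenced in the docstring above.

```python
from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator

run_task = EcsRunTaskOperator(
    task_id="run_task_with_ebs_volume",
    cluster="my-ecs-cluster",                 # illustrative
    task_definition="my-task-def",            # illustrative
    overrides={"containerOverrides": []},
    capacity_provider_strategy=[{"capacityProvider": "FARGATE", "weight": 1}],
    volume_configurations=[
        {
            "name": "data",  # must match the volume name declared in the task definition
            "managedEBSVolume": {
                "sizeInGiB": 50,
                "volumeType": "gp3",
                "roleArn": "arn:aws:iam::123456789012:role/ecsInfrastructureRole",  # illustrative
            },
        }
    ],
)
```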
airflow/providers/amazon/aws/operators/glue_databrew.py
@@ -17,20 +17,22 @@
 # under the License.
 from __future__ import annotations
 
-from functools import cached_property
+import warnings
 from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.configuration import conf
-from airflow.models import BaseOperator
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook
+from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.glue_databrew import GlueDataBrewJobCompleteTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
+from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-class GlueDataBrewStartJobOperator(BaseOperator):
+class GlueDataBrewStartJobOperator(AwsBaseOperator[GlueDataBrewHook]):
     """
     Start an AWS Glue DataBrew job.
 
@@ -47,14 +49,30 @@ class GlueDataBrewStartJobOperator(BaseOperator):
     :param deferrable: If True, the operator will wait asynchronously for the job to complete.
         This implies waiting for completion. This mode requires aiobotocore module to be installed.
         (default: False)
-    :param delay: Time in seconds to wait between status checks. Default is 30.
+    :param delay: Time in seconds to wait between status checks. (Deprecated).
+    :param waiter_delay: Time in seconds to wait between status checks. Default is 30.
+    :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 60)
     :return: dictionary with key run_id and value of the resulting job's run_id.
+
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
     """
 
-    template_fields: Sequence[str] = (
+    aws_hook_class = GlueDataBrewHook
+
+    template_fields: Sequence[str] = aws_template_fields(
         "job_name",
         "wait_for_completion",
-        "delay",
+        "waiter_delay",
+        "waiter_max_attempts",
         "deferrable",
     )
 
@@ -62,21 +80,25 @@ class GlueDataBrewStartJobOperator(BaseOperator):
         self,
         job_name: str,
         wait_for_completion: bool = True,
-        delay: int = 30,
+        delay: int | None = None,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
-        aws_conn_id: str | None = "aws_default",
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.job_name = job_name
         self.wait_for_completion = wait_for_completion
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
         self.deferrable = deferrable
-        self.delay = delay
-        self.aws_conn_id = aws_conn_id
-
-    @cached_property
-    def hook(self) -> GlueDataBrewHook:
-        return GlueDataBrewHook(aws_conn_id=self.aws_conn_id)
+        if delay is not None:
+            warnings.warn(
+                "please use `waiter_delay` instead of delay.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+            self.waiter_delay = delay
 
     def execute(self, context: Context):
         job = self.hook.conn.start_job_run(Name=self.job_name)
@@ -88,7 +110,14 @@ class GlueDataBrewStartJobOperator(BaseOperator):
             self.log.info("Deferring job %s with run_id %s", self.job_name, run_id)
             self.defer(
                 trigger=GlueDataBrewJobCompleteTrigger(
-                    aws_conn_id=self.aws_conn_id, job_name=self.job_name, run_id=run_id, delay=self.delay
+                    job_name=self.job_name,
+                    run_id=run_id,
+                    waiter_delay=self.waiter_delay,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    aws_conn_id=self.aws_conn_id,
+                    region_name=self.region_name,
+                    verify=self.verify,
+                    botocore_config=self.botocore_config,
                 ),
                 method_name="execute_complete",
             )
@@ -97,7 +126,12 @@ class GlueDataBrewStartJobOperator(BaseOperator):
             self.log.info(
                 "Waiting for AWS Glue DataBrew Job: %s. Run Id: %s to complete.", self.job_name, run_id
             )
-            status = self.hook.job_completion(job_name=self.job_name, delay=self.delay, run_id=run_id)
+            status = self.hook.job_completion(
+                job_name=self.job_name,
+                delay=self.waiter_delay,
+                run_id=run_id,
+                max_attempts=self.waiter_max_attempts,
+            )
             self.log.info("Glue DataBrew Job: %s status: %s", self.job_name, status)
 
         return {"run_id": run_id}
@@ -105,6 +139,9 @@ class GlueDataBrewStartJobOperator(BaseOperator):
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, str]:
         event = validate_execute_complete_event(event)
 
+        if event["status"] != "success":
+            raise AirflowException("Error while running AWS Glue DataBrew job: %s", event)
+
         run_id = event.get("run_id", "")
         status = event.get("status", "")
 
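
Note (not part of the diff): a hedged usage sketch of the reworked operator; the job name and tuning values are illustrative assumptions.

```python
from airflow.providers.amazon.aws.operators.glue_databrew import GlueDataBrewStartJobOperator

# waiter_delay/waiter_max_attempts replace the deprecated `delay` argument; passing
# `delay` still works but now emits AirflowProviderDeprecationWarning.
start_databrew_job = GlueDataBrewStartJobOperator(
    task_id="start_databrew_job",
    job_name="my-databrew-job",      # illustrative
    wait_for_completion=True,
    waiter_delay=30,
    waiter_max_attempts=60,
    region_name="us-east-1",         # now available via AwsBaseOperator
)
```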
airflow/providers/amazon/aws/sensors/batch.py
@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any, Sequence
 from deprecated import deprecated
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
 from airflow.sensors.base import BaseSensorOperator
@@ -265,6 +265,4 @@ class BatchJobQueueSensor(BaseSensorOperator):
             return False
 
         message = f"AWS Batch job queue failed. AWS Batch job queue status: {status}"
-        if self.soft_fail:
-            raise AirflowSkipException(message)
         raise AirflowException(message)
airflow/providers/amazon/aws/sensors/ecs.py
@@ -35,7 +35,7 @@ if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-def _check_failed(current_state, target_state, failure_states, soft_fail: bool) -> None:
+def _check_failed(current_state, target_state, failure_states) -> None:
     if (current_state != target_state) and (current_state in failure_states):
         raise AirflowException(
             f"Terminal state reached. Current state: {current_state}, Expected state: {target_state}"
@@ -86,7 +86,7 @@ class EcsClusterStateSensor(EcsBaseSensor):
         cluster_state = EcsClusterStates(self.hook.get_cluster_state(cluster_name=self.cluster_name))
 
         self.log.info("Cluster state: %s, waiting for: %s", cluster_state, self.target_state)
-        _check_failed(cluster_state, self.target_state, self.failure_states, self.soft_fail)
+        _check_failed(cluster_state, self.target_state, self.failure_states)
 
         return cluster_state == self.target_state
 
@@ -132,7 +132,7 @@ class EcsTaskDefinitionStateSensor(EcsBaseSensor):
         )
 
         self.log.info("Task Definition state: %s, waiting for: %s", task_definition_state, self.target_state)
-        _check_failed(task_definition_state, self.target_state, [self.failure_states], self.soft_fail)
+        _check_failed(task_definition_state, self.target_state, [self.failure_states])
         return task_definition_state == self.target_state
 
 
@@ -172,5 +172,5 @@ class EcsTaskStateSensor(EcsBaseSensor):
         task_state = EcsTaskStates(self.hook.get_task_state(cluster=self.cluster, task=self.task))
 
         self.log.info("Task state: %s, waiting for: %s", task_state, self.target_state)
-        _check_failed(task_state, self.target_state, self.failure_states, self.soft_fail)
+        _check_failed(task_state, self.target_state, self.failure_states)
         return task_state == self.target_state
airflow/providers/amazon/aws/sensors/glue.py
@@ -21,7 +21,7 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowSkipException
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.glue import GlueDataQualityHook, GlueJobHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.glue import (
@@ -177,8 +177,6 @@ class GlueDataQualityRuleSetEvaluationRunSensor(AwsBaseSensor[GlueDataQualityHoo
 
         if event["status"] != "success":
             message = f"Error: AWS Glue data quality ruleset evaluation run: {event}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
             raise AirflowException(message)
 
         self.hook.validate_evaluation_run_results(
@@ -300,8 +298,6 @@ class GlueDataQualityRuleRecommendationRunSensor(AwsBaseSensor[GlueDataQualityHo
 
         if event["status"] != "success":
             message = f"Error: AWS Glue data quality recommendation run: {event}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
             raise AirflowException(message)
 
         if self.show_results:
airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -44,11 +44,12 @@ class RedshiftToS3Operator(BaseOperator):
     :param s3_bucket: reference to a specific S3 bucket
     :param s3_key: reference to a specific S3 key. If ``table_as_file_name`` is set
         to False, this param must include the desired file name
-    :param schema: reference to a specific schema in redshift database
-        Applicable when ``table`` param provided.
-    :param table: reference to a specific table in redshift database
-        Used when ``select_query`` param not provided.
-    :param select_query: custom select query to fetch data from redshift database
+    :param schema: reference to a specific schema in redshift database,
+        used when ``table`` param provided and ``select_query`` param not provided
+    :param table: reference to a specific table in redshift database,
+        used when ``schema`` param provided and ``select_query`` param not provided
+    :param select_query: custom select query to fetch data from redshift database,
+        has precedence over default query `SELECT * FROM ``schema``.``table``
     :param redshift_conn_id: reference to a specific redshift database
     :param aws_conn_id: reference to a specific S3 connection
         If the AWS connection contains 'aws_iam_role' in ``extras``
@@ -138,12 +139,17 @@ class RedshiftToS3Operator(BaseOperator):
                     {unload_options};
         """
 
+    @property
+    def default_select_query(self) -> str | None:
+        if self.schema and self.table:
+            return f"SELECT * FROM {self.schema}.{self.table}"
+        return None
+
     def execute(self, context: Context) -> None:
         if self.table and self.table_as_file_name:
             self.s3_key = f"{self.s3_key}/{self.table}_"
 
-        if self.schema and self.table:
-            self.select_query = f"SELECT * FROM {self.schema}.{self.table}"
+        self.select_query = self.select_query or self.default_select_query
 
         if self.select_query is None:
             raise ValueError(
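
Note (not part of the diff): an illustrative sketch of the new precedence rule; connection IDs, bucket and table names are assumptions.

```python
from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator

# The custom select_query now takes precedence over the default
# "SELECT * FROM <schema>.<table>", even when schema/table are also set.
unload_recent_orders = RedshiftToS3Operator(
    task_id="unload_recent_orders",
    s3_bucket="my-export-bucket",
    s3_key="exports/orders",
    schema="public",
    table="orders",
    select_query="SELECT * FROM public.orders WHERE order_date > CURRENT_DATE - 7",
    redshift_conn_id="redshift_default",
    aws_conn_id="aws_default",
)
```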
airflow/providers/amazon/aws/transfers/s3_to_redshift.py
@@ -59,7 +59,8 @@ class S3ToRedshiftOperator(BaseOperator):
         - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses.
             You can specify this argument if you want to use a different
             CA cert bundle than the one used by botocore.
-    :param column_list: list of column names to load
+    :param column_list: list of column names to load source data fields into specific target columns
+        https://docs.aws.amazon.com/redshift/latest/dg/copy-parameters-column-mapping.html#copy-column-list
     :param copy_options: reference to a list of COPY options
     :param method: Action to be performed on execution. Available ``APPEND``, ``UPSERT`` and ``REPLACE``.
     :param upsert_keys: List of fields to use as key on upsert action
@@ -121,6 +122,10 @@ class S3ToRedshiftOperator(BaseOperator):
             if arg in self.redshift_data_api_kwargs:
                 raise AirflowException(f"Cannot include param '{arg}' in Redshift Data API kwargs")
 
+    @property
+    def use_redshift_data(self):
+        return bool(self.redshift_data_api_kwargs)
+
     def _build_copy_query(
         self, copy_destination: str, credentials_block: str, region_info: str, copy_options: str
     ) -> str:
@@ -138,11 +143,11 @@ class S3ToRedshiftOperator(BaseOperator):
         if self.method not in AVAILABLE_METHODS:
             raise AirflowException(f"Method not found! Available methods: {AVAILABLE_METHODS}")
 
-        redshift_hook: RedshiftDataHook | RedshiftSQLHook
-        if self.redshift_data_api_kwargs:
-            redshift_hook = RedshiftDataHook(aws_conn_id=self.redshift_conn_id)
+        if self.use_redshift_data:
+            redshift_data_hook = RedshiftDataHook(aws_conn_id=self.redshift_conn_id)
         else:
-            redshift_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
+            redshift_sql_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
+
         conn = S3Hook.get_connection(conn_id=self.aws_conn_id) if self.aws_conn_id else None
         region_info = ""
         if conn and conn.extra_dejson.get("region", False):
@@ -167,12 +172,12 @@ class S3ToRedshiftOperator(BaseOperator):
         if self.method == "REPLACE":
             sql = ["BEGIN;", f"DELETE FROM {destination};", copy_statement, "COMMIT"]
         elif self.method == "UPSERT":
-            if isinstance(redshift_hook, RedshiftDataHook):
-                keys = self.upsert_keys or redshift_hook.get_table_primary_key(
+            if self.use_redshift_data:
+                keys = self.upsert_keys or redshift_data_hook.get_table_primary_key(
                     table=self.table, schema=self.schema, **self.redshift_data_api_kwargs
                 )
             else:
-                keys = self.upsert_keys or redshift_hook.get_table_primary_key(self.table, self.schema)
+                keys = self.upsert_keys or redshift_sql_hook.get_table_primary_key(self.table, self.schema)
             if not keys:
                 raise AirflowException(
                     f"No primary key on {self.schema}.{self.table}. Please provide keys on 'upsert_keys'"
@@ -192,8 +197,57 @@ class S3ToRedshiftOperator(BaseOperator):
             sql = copy_statement
 
         self.log.info("Executing COPY command...")
-        if isinstance(redshift_hook, RedshiftDataHook):
-            redshift_hook.execute_query(sql=sql, **self.redshift_data_api_kwargs)
+        if self.use_redshift_data:
+            redshift_data_hook.execute_query(sql=sql, **self.redshift_data_api_kwargs)
         else:
-            redshift_hook.run(sql, autocommit=self.autocommit)
+            redshift_sql_hook.run(sql, autocommit=self.autocommit)
         self.log.info("COPY command complete...")
+
+    def get_openlineage_facets_on_complete(self, task_instance):
+        """Implement on_complete as we will query destination table."""
+        from airflow.providers.amazon.aws.utils.openlineage import (
+            get_facets_from_redshift_table,
+        )
+        from airflow.providers.common.compat.openlineage.facet import (
+            Dataset,
+            LifecycleStateChange,
+            LifecycleStateChangeDatasetFacet,
+        )
+        from airflow.providers.openlineage.extractors import OperatorLineage
+
+        if self.use_redshift_data:
+            redshift_data_hook = RedshiftDataHook(aws_conn_id=self.redshift_conn_id)
+            database = self.redshift_data_api_kwargs.get("database")
+            identifier = self.redshift_data_api_kwargs.get(
+                "cluster_identifier", self.redshift_data_api_kwargs.get("workgroup_name")
+            )
+            port = self.redshift_data_api_kwargs.get("port", "5439")
+            authority = f"{identifier}.{redshift_data_hook.region_name}:{port}"
+            output_dataset_facets = get_facets_from_redshift_table(
+                redshift_data_hook, self.table, self.redshift_data_api_kwargs, self.schema
+            )
+        else:
+            redshift_sql_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
+            database = redshift_sql_hook.conn.schema
+            authority = redshift_sql_hook.get_openlineage_database_info(redshift_sql_hook.conn).authority
+            output_dataset_facets = get_facets_from_redshift_table(
+                redshift_sql_hook, self.table, {}, self.schema
+            )
+
+        if self.method == "REPLACE":
+            output_dataset_facets["lifecycleStateChange"] = LifecycleStateChangeDatasetFacet(
+                lifecycleStateChange=LifecycleStateChange.OVERWRITE
+            )
+
+        output_dataset = Dataset(
+            namespace=f"redshift://{authority}",
+            name=f"{database}.{self.schema}.{self.table}",
+            facets=output_dataset_facets,
+        )
+
+        input_dataset = Dataset(
+            namespace=f"s3://{self.s3_bucket}",
+            name=self.s3_key,
+        )
+
+        return OperatorLineage(inputs=[input_dataset], outputs=[output_dataset])
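
Note (not part of the diff): a hedged sketch of the Redshift Data API path that the new `use_redshift_data` property and the lineage method distinguish; all names and kwargs are illustrative assumptions.

```python
from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator

# Passing redshift_data_api_kwargs makes use_redshift_data True, so both execute()
# and get_openlineage_facets_on_complete() go through RedshiftDataHook.
copy_orders = S3ToRedshiftOperator(
    task_id="copy_orders",
    s3_bucket="my-export-bucket",
    s3_key="exports/orders_",
    schema="public",
    table="orders",
    copy_options=["CSV"],
    method="REPLACE",  # lineage output then carries a LifecycleStateChange OVERWRITE facet
    redshift_conn_id="aws_default",
    redshift_data_api_kwargs={
        "database": "dev",
        "cluster_identifier": "my-cluster",
        "db_user": "awsuser",
    },
)
```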
airflow/providers/amazon/aws/triggers/glue_databrew.py
@@ -17,6 +17,9 @@
 
 from __future__ import annotations
 
+import warnings
+
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
 
@@ -27,8 +30,10 @@ class GlueDataBrewJobCompleteTrigger(AwsBaseWaiterTrigger):
 
     :param job_name: Glue DataBrew job name
     :param run_id: the ID of the specific run to watch for that job
-    :param delay: Number of seconds to wait between two checks. Default is 10 seconds.
-    :param max_attempts: Maximum number of attempts to wait for the job to complete. Default is 60 attempts.
+    :param delay: Number of seconds to wait between two checks.(Deprecated).
+    :param waiter_delay: Number of seconds to wait between two checks. Default is 30 seconds.
+    :param max_attempts: Maximum number of attempts to wait for the job to complete.(Deprecated).
+    :param waiter_max_attempts: Maximum number of attempts to wait for the job to complete. Default is 60 attempts.
     :param aws_conn_id: The Airflow connection used for AWS credentials.
     """
 
@@ -36,11 +41,27 @@ class GlueDataBrewJobCompleteTrigger(AwsBaseWaiterTrigger):
         self,
         job_name: str,
         run_id: str,
-        aws_conn_id: str | None,
-        delay: int = 10,
-        max_attempts: int = 60,
+        delay: int | None = None,
+        max_attempts: int | None = None,
+        waiter_delay: int = 30,
+        waiter_max_attempts: int = 60,
+        aws_conn_id: str | None = "aws_default",
         **kwargs,
     ):
+        if delay is not None:
+            warnings.warn(
+                "please use `waiter_delay` instead of delay.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+        waiter_delay = delay or waiter_delay
+        if max_attempts is not None:
+            warnings.warn(
+                "please use `waiter_max_attempts` instead of max_attempts.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+        waiter_max_attempts = max_attempts or waiter_max_attempts
         super().__init__(
             serialized_fields={"job_name": job_name, "run_id": run_id},
             waiter_name="job_complete",
@@ -50,10 +71,16 @@ class GlueDataBrewJobCompleteTrigger(AwsBaseWaiterTrigger):
             status_queries=["State"],
             return_value=run_id,
             return_key="run_id",
-            waiter_delay=delay,
-            waiter_max_attempts=max_attempts,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
             aws_conn_id=aws_conn_id,
+            **kwargs,
         )
 
     def hook(self) -> GlueDataBrewHook:
-        return GlueDataBrewHook(aws_conn_id=self.aws_conn_id)
+        return GlueDataBrewHook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
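
Note (not part of the diff): a small sketch of the trigger's new and deprecated argument forms, derived from the constructor above; the job name and run id are illustrative.

```python
from airflow.providers.amazon.aws.triggers.glue_databrew import GlueDataBrewJobCompleteTrigger

# New-style arguments.
trigger = GlueDataBrewJobCompleteTrigger(
    job_name="my-databrew-job",
    run_id="db_1234567890",
    waiter_delay=30,
    waiter_max_attempts=60,
)

# Old-style arguments still work, but emit AirflowProviderDeprecationWarning and are
# mapped onto waiter_delay/waiter_max_attempts internally.
legacy_trigger = GlueDataBrewJobCompleteTrigger(
    job_name="my-databrew-job",
    run_id="db_1234567890",
    delay=10,
    max_attempts=120,
)
```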
airflow/providers/amazon/aws/utils/openlineage.py
@@ -0,0 +1,136 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
+from airflow.providers.common.compat.openlineage.facet import (
+    ColumnLineageDatasetFacet,
+    DocumentationDatasetFacet,
+    Fields,
+    InputField,
+    SchemaDatasetFacet,
+    SchemaDatasetFacetFields,
+)
+
+if TYPE_CHECKING:
+    from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook
+
+
+def get_facets_from_redshift_table(
+    redshift_hook: RedshiftDataHook | RedshiftSQLHook,
+    table: str,
+    redshift_data_api_kwargs: dict,
+    schema: str = "public",
+) -> dict[Any, Any]:
+    """
+    Query redshift for table metadata.
+
+    SchemaDatasetFacet and DocumentationDatasetFacet (if table has description) will be created.
+    """
+    sql = f"""
+    SELECT
+        cols.column_name,
+        cols.data_type,
+        col_des.description as column_description,
+        tbl_des.description as table_description
+    FROM
+        information_schema.columns cols
+    LEFT JOIN
+        pg_catalog.pg_description col_des
+    ON
+        cols.ordinal_position = col_des.objsubid
+        AND col_des.objoid = (SELECT oid FROM pg_class WHERE relnamespace =
+        (SELECT oid FROM pg_namespace WHERE nspname = cols.table_schema) AND relname = cols.table_name)
+    LEFT JOIN
+        pg_catalog.pg_class tbl
+    ON
+        tbl.relname = cols.table_name
+        AND tbl.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = cols.table_schema)
+    LEFT JOIN
+        pg_catalog.pg_description tbl_des
+    ON
+        tbl.oid = tbl_des.objoid
+        AND tbl_des.objsubid = 0
+    WHERE
+        cols.table_name = '{table}'
+        AND cols.table_schema = '{schema}';
+    """
+    if isinstance(redshift_hook, RedshiftSQLHook):
+        records = redshift_hook.get_records(sql)
+        if records:
+            table_description = records[0][-1]  # Assuming the table description is the same for all rows
+        else:
+            table_description = None
+        documentation = DocumentationDatasetFacet(description=table_description or "")
+        table_schema = SchemaDatasetFacet(
+            fields=[
+                SchemaDatasetFacetFields(name=field[0], type=field[1], description=field[2])
+                for field in records
+            ]
+        )
+    else:
+        statement_id = redshift_hook.execute_query(sql=sql, poll_interval=1, **redshift_data_api_kwargs)
+        response = redshift_hook.conn.get_statement_result(Id=statement_id)
+
+        table_schema = SchemaDatasetFacet(
+            fields=[
+                SchemaDatasetFacetFields(
+                    name=field[0]["stringValue"],
+                    type=field[1]["stringValue"],
+                    description=field[2].get("stringValue"),
+                )
+                for field in response["Records"]
+            ]
+        )
+        # Table description will be the same for all fields, so we retrieve it from first field.
+        documentation = DocumentationDatasetFacet(
+            description=response["Records"][0][3].get("stringValue") or ""
+        )
+
+    return {"schema": table_schema, "documentation": documentation}
+
+
+def get_identity_column_lineage_facet(
+    field_names,
+    input_datasets,
+) -> ColumnLineageDatasetFacet:
+    """
+    Get column lineage facet.
+
+    Simple lineage will be created, where each source column corresponds to single destination column
+    in each input dataset and there are no transformations made.
+    """
+    if field_names and not input_datasets:
+        raise ValueError("When providing `field_names` You must provide at least one `input_dataset`.")
+
+    column_lineage_facet = ColumnLineageDatasetFacet(
+        fields={
+            field: Fields(
+                inputFields=[
+                    InputField(namespace=dataset.namespace, name=dataset.name, field=field)
+                    for dataset in input_datasets
+                ],
+                transformationType="IDENTITY",
+                transformationDescription="identical",
+            )
+            for field in field_names
+        }
+    )
+    return column_lineage_facet
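
Note (not part of the diff): an illustrative sketch of the new `get_identity_column_lineage_facet` helper; the dataset and column names are assumptions.

```python
from airflow.providers.amazon.aws.utils.openlineage import get_identity_column_lineage_facet
from airflow.providers.common.compat.openlineage.facet import Dataset

# A 1:1 "IDENTITY" mapping is generated from each input dataset's column to the
# same-named output column.
source = Dataset(namespace="s3://my-export-bucket", name="exports/orders_")
lineage = get_identity_column_lineage_facet(
    field_names=["order_id", "order_date", "amount"],
    input_datasets=[source],
)
print(lineage.fields["order_id"].inputFields[0].name)  # exports/orders_
```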
airflow/providers/amazon/aws/utils/waiter_with_logging.py
@@ -71,11 +71,21 @@ def wait(
         try:
             waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
         except WaiterError as error:
-            if "terminal failure" in str(error):
-                log.error("%s: %s", failure_message, _LazyStatusFormatter(status_args, error.last_response))
+            error_reason = str(error)
+            last_response = error.last_response
+
+            if "terminal failure" in error_reason:
+                log.error("%s: %s", failure_message, _LazyStatusFormatter(status_args, last_response))
+                raise AirflowException(f"{failure_message}: {error}")
+
+            if (
+                "An error occurred" in error_reason
+                and isinstance(last_response.get("Error"), dict)
+                and "Code" in last_response.get("Error")
+            ):
                 raise AirflowException(f"{failure_message}: {error}")
 
-            log.info("%s: %s", status_message, _LazyStatusFormatter(status_args, error.last_response))
+            log.info("%s: %s", status_message, _LazyStatusFormatter(status_args, last_response))
         else:
             break
     else:
@@ -122,11 +132,21 @@ async def async_wait(
         try:
             await waiter.wait(**args, WaiterConfig={"MaxAttempts": 1})
         except WaiterError as error:
-            if "terminal failure" in str(error):
-                log.error("%s: %s", failure_message, _LazyStatusFormatter(status_args, error.last_response))
+            error_reason = str(error)
+            last_response = error.last_response
+
+            if "terminal failure" in error_reason:
+                log.error("%s: %s", failure_message, _LazyStatusFormatter(status_args, last_response))
+                raise AirflowException(f"{failure_message}: {error}")
+
+            if (
+                "An error occurred" in error_reason
+                and isinstance(last_response.get("Error"), dict)
+                and "Code" in last_response.get("Error")
+            ):
                 raise AirflowException(f"{failure_message}: {error}")
 
-            log.info("%s: %s", status_message, _LazyStatusFormatter(status_args, error.last_response))
+            log.info("%s: %s", status_message, _LazyStatusFormatter(status_args, last_response))
         else:
             break
     else:
airflow/providers/amazon/aws/waiters/stepfunctions.json
@@ -13,7 +13,7 @@
                     "state": "success"
                 },
                 {
-                    "matcher": "error",
+                    "matcher": "path",
                     "argument": "status",
                     "expected": "RUNNING",
                     "state": "retry"
airflow/providers/amazon/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1723968989,
+        "source-date-epoch": 1726859877,
         "versions": [
+            "8.29.0",
            "8.28.0",
            "8.27.0",
            "8.26.0",
@@ -99,17 +100,17 @@ def get_provider_info():
            "apache-airflow-providers-common-compat>=1.1.0",
            "apache-airflow-providers-common-sql>=1.3.1",
            "apache-airflow-providers-http",
-           "apache-airflow-providers-common-compat>=1.1.0",
            "boto3>=1.34.90",
            "botocore>=1.34.90",
            "inflection>=0.5.1",
-           "watchtower>=3.0.0,<4",
+           "watchtower>=3.0.0,!=3.3.0,<4",
            "jsonpath_ng>=1.5.3",
            "redshift_connector>=2.0.918",
            "sqlalchemy_redshift>=0.8.6",
            "asgiref>=2.3.0",
            "PyAthena>=3.0.10",
            "jmespath>=0.7.0",
+           "python3-saml>=1.16.0",
        ],
        "additional-extras": [
            {
{apache_airflow_providers_amazon-8.28.0rc1.dist-info → apache_airflow_providers_amazon-8.29.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.28.0rc1
+Version: 8.29.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,23 +22,23 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
-Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
-Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
+Requires-Dist: apache-airflow-providers-common-compat>=1.1.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.8.0rc0
+Requires-Dist: apache-airflow>=2.8.0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
 Requires-Dist: inflection>=0.5.1
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: jsonpath_ng>=1.5.3
+Requires-Dist: python3-saml>=1.16.0
 Requires-Dist: redshift_connector>=2.0.918
 Requires-Dist: sqlalchemy_redshift>=0.8.6
-Requires-Dist: watchtower>=3.0.0,<4
+Requires-Dist: watchtower>=3.0.0,!=3.3.0,<4
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
 Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
@@ -56,8 +56,8 @@ Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.29.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.29.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -125,7 +125,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.28.0.rc1``
+Release: ``8.29.0``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -138,7 +138,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.29.0/>`_.
 
 Installation
 ------------
@@ -152,25 +152,25 @@ The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 Requirements
 ------------
 
-========================================== ==================
+========================================== ======================
 PIP package                                Version required
-========================================== ==================
+========================================== ======================
 ``apache-airflow``                         ``>=2.8.0``
 ``apache-airflow-providers-common-compat`` ``>=1.1.0``
 ``apache-airflow-providers-common-sql``    ``>=1.3.1``
 ``apache-airflow-providers-http``
-``apache-airflow-providers-common-compat`` ``>=1.1.0``
 ``boto3``                                  ``>=1.34.90``
 ``botocore``                               ``>=1.34.90``
 ``inflection``                             ``>=0.5.1``
-``watchtower``                             ``>=3.0.0,<4``
+``watchtower``                             ``>=3.0.0,!=3.3.0,<4``
 ``jsonpath_ng``                            ``>=1.5.3``
 ``redshift_connector``                     ``>=2.0.918``
 ``sqlalchemy_redshift``                    ``>=0.8.6``
 ``asgiref``                                ``>=2.3.0``
 ``PyAthena``                               ``>=3.0.10``
 ``jmespath``                               ``>=0.7.0``
-========================================== ==================
+``python3-saml``                           ``>=1.16.0``
+========================================== ======================
 
 Cross provider package dependencies
 -----------------------------------
@@ -205,4 +205,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.29.0/changelog.html>`_.
@@ -1,10 +1,10 @@
1
1
  airflow/providers/amazon/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
2
- airflow/providers/amazon/__init__.py,sha256=Q81gd1BpNYVrDMTImImWYFH9aNzRG1HIDcotMYnzAfE,1494
3
- airflow/providers/amazon/get_provider_info.py,sha256=ooAm-teOdxdYJAv29APRVll6FTxWKDO4iwMaD9C2ezc,68588
2
+ airflow/providers/amazon/__init__.py,sha256=DVlqjb_QLk8ox7WCkFnM04OlVRPLseFoCWovvNvoN54,1494
3
+ airflow/providers/amazon/get_provider_info.py,sha256=UAYKuSFhpOhShfBdpa-Jdj7CTAUWiL6WWt01X5xCd_k,68593
4
4
  airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
5
5
  airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
6
6
  airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=4h0SZ63YxsMKM5xk8mm5v05PbEXUIL79a0YCbJ8QP_Y,16483
7
+ airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=i-yJp506WC1PE30XiD1rYN-jxagikgDoK-MZ6yJr44o,16699
8
8
  airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
9
9
  airflow/providers/amazon/aws/auth_manager/user.py,sha256=SoiiA3sVB1-G02qhQDSTst_25MjW4xbSE0vVDxwR-uw,1882
10
10
  airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -16,7 +16,7 @@ airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=SCTdbAsedEp
16
16
  airflow/providers/amazon/aws/auth_manager/cli/definition.py,sha256=oGMUGwb6xz47vaGwAzTzn8g0xUcAbvtxGPeANFOc7Ec,2749
17
17
  airflow/providers/amazon/aws/auth_manager/cli/idc_commands.py,sha256=4r5xfkyyqUhqdYJuyISfBI7-WlPzxfEgqmh_PR37wj8,5837
18
18
  airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
19
- airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py,sha256=5XLvMpdpNpTYxntN2xL1-XfuxevQzz5EYHJb4TBOLFI,1566
19
+ airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py,sha256=b8vMzCFyJgkqnnqsR52tUlR7Lm4Dj8Yev7yFC3c22_Y,1679
20
20
  airflow/providers/amazon/aws/auth_manager/views/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
21
21
  airflow/providers/amazon/aws/auth_manager/views/auth.py,sha256=e5InDh2jYEBClkgn9xm6fYl8qK-miP692WuOGa5gC6g,5884
22
22
  airflow/providers/amazon/aws/datasets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -24,15 +24,15 @@ airflow/providers/amazon/aws/datasets/s3.py,sha256=GFTxmCNwfV3FeWW_b78-RIop6NRCk
24
24
  airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
25
25
  airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
26
26
  airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=TPSNZJ6E3zqN7mvdrMrarqwHeFYN9Efd2jD3hpN7tr0,970
27
- airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=iGIYiedhoziVA3_ZIx20kXLyBO5l0Pf0kUjHjfxS5ok,21092
27
+ airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=kXUuJVMW3_winuX81dE-_YEZ3kSz09oJ4mmf3liFt2k,21082
28
28
  airflow/providers/amazon/aws/executors/batch/batch_executor_config.py,sha256=7yYLKB1jRoBy0AeW5chcpz7i2UfvSQob9QLvMhYUWDQ,3223
29
29
  airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNVYsQRPNlLj0zC8TC_awWk2rv3tkUuYU,2445
30
30
  airflow/providers/amazon/aws/executors/batch/utils.py,sha256=Jugs8lvvtWey_CcwMkHnRVe9G0Sn8wyVmbROVrjgk9A,5286
31
31
  airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
32
32
  airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=hxj76uoo4y9koshb5Ou2hyjvNKCtrSK5wXea3iVtPqs,3762
33
33
  airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=72N8Y84j-cnICWqg7WuW2AI_B_DYlUcpkvPlzZGNI0Y,25002
34
- airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=iRP39ynsYFOisLN9NQsiLikTrBBN54bWaFQs60Snrsw,5436
35
- airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=RLsmPN5MpLpXQftkyoIb8i8HxAw2R3vQWK1zM_M5XDg,9477
34
+ airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=pJwaSJLzsflJxlB-5pveRSzXS1O8ua9Tq7_P63RrQ9I,6003
35
+ airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=pzMNZ8rtD3HCOdAfUCL6xyTvu9Z8wXfIzgRIbK4_g28,9732
36
36
  airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
37
37
  airflow/providers/amazon/aws/executors/utils/base_config_keys.py,sha256=q-xDVM8_iiygte8PK1khJjX7442sTNe72xJGwngtdV8,1169
38
38
  airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py,sha256=FDdxVfdiCBZRiKQXYdjUp33hYaoHvOSZ4HjZAzpL8NY,3100
@@ -76,7 +76,7 @@ airflow/providers/amazon/aws/hooks/rds.py,sha256=h7NF3GZ42RKeh70rlg2BQFVpa8vNadS
76
76
  airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=NA9HDPkTwSVVwE1BN1WCym_HQ2OQcsDBXFjqxQixKhg,13119
77
77
  airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=oYlUoRBE45XRQxkGCnXBv707iBk1QieOQfeZ3bn81Rw,10253
78
78
  airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=7Iqr_5IVREdsW9mdTZEm2_Ng13eIBvkqZKS-LtTMM-k,11318
79
- airflow/providers/amazon/aws/hooks/s3.py,sha256=jIMunEV-0mlD6sbdVd0Og07hAs9K9anBc7VXM7BIBvY,61423
79
+ airflow/providers/amazon/aws/hooks/s3.py,sha256=9-asub19mVios2uuwIt8wPwaXPSg-ljvhVzAI82rhp8,61446
80
80
  airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=EF3fK_vYlklQd7YTIovLN25jYH1JMjqcTB9TlXJdzAQ,62154
81
81
  airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
82
82
  airflow/providers/amazon/aws/hooks/ses.py,sha256=uOTjyhb87jNyf2B11zH1wg5Oomnsx0nM4aHteP-mCHs,4147
@@ -95,8 +95,8 @@ airflow/providers/amazon/aws/links/glue.py,sha256=NrXZ-jKmX3jNxGngBruudRcm7vgsxZ
95
95
  airflow/providers/amazon/aws/links/logs.py,sha256=BgRd61V_IvZpnZLShRN6zDqeoxjXC4M-6sfSgL0TGpM,1608
96
96
  airflow/providers/amazon/aws/links/step_function.py,sha256=xSL4vfKLnCn-QboRtruajpH5elRrNfw0XkY7eSfPpE4,2099
97
97
  airflow/providers/amazon/aws/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
98
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=CMjD5z_HPCs_wkZ_dSB1JtnLb30s6OdTjrF4amguqqw,6950
99
- airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=n361LFeRY_2-OVZiwjvvCbt03cmO-Ts7cPeoiN6kiXE,8467
98
+ airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=11iFc08HE6bvLWsGCTi0RoiYsSziEXWFEjvCwDJ4liw,6806
99
+ airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=opeXMThRDzKtcxlyNnM2QvNPdrrnekVWIskpUREQrMo,8396
100
100
  airflow/providers/amazon/aws/notifications/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
101
101
  airflow/providers/amazon/aws/notifications/chime.py,sha256=QCEdvVO7oCIbf_rGHtQZeitAUKARgsgtKjokxYb_kB0,2122
102
102
  airflow/providers/amazon/aws/notifications/sns.py,sha256=VCh3MpKl86RBBmI2zw0eyHG7Q8DxdL9ug8zaiC3YZyI,3101
@@ -112,14 +112,14 @@ airflow/providers/amazon/aws/operators/comprehend.py,sha256=JL0UfGpAekOeRFx3IT32
112
112
  airflow/providers/amazon/aws/operators/datasync.py,sha256=sZHqD1EDv0FfMj-r__P3MnN_39LzQhlT1_l9ghJbJ5c,18925
113
113
  airflow/providers/amazon/aws/operators/dms.py,sha256=6RhUtbELAjp0LLkUWl73kdcH4MRmyTzwHi1NxOlkE0Q,12313
114
114
  airflow/providers/amazon/aws/operators/ec2.py,sha256=aQj6cL3nZzu0tcn3dq6RBSPsByZe8fNtn6qcpQYtlNI,17051
115
- airflow/providers/amazon/aws/operators/ecs.py,sha256=pmm05ugkdyUNwkfY1X-mQdT8pqui0qJumwt92tEaEgU,32601
115
+ airflow/providers/amazon/aws/operators/ecs.py,sha256=h-rRbIU3MeVgoZz85O73F8vwiOG942h0_n6DpeYejVM,33344
116
116
  airflow/providers/amazon/aws/operators/eks.py,sha256=0xHtPzxZx3ymr6-iqGvAoZsnA24PrjYRSAiFzuxrl3U,50761
117
117
  airflow/providers/amazon/aws/operators/emr.py,sha256=XNafysiROWBcMxAPK_9MQplcY-itOivKYnljffQo6eE,84909
118
118
  airflow/providers/amazon/aws/operators/eventbridge.py,sha256=e686XFhVi54DbaCk7oVc0fhvH6GIPU3p8jgyCie1yBU,10394
119
119
  airflow/providers/amazon/aws/operators/glacier.py,sha256=zxwC6lLk6sWerjlogXq6HgNOJx4h0hkqpGpqn23hJWk,3654
120
120
  airflow/providers/amazon/aws/operators/glue.py,sha256=m8hdF6eTyzsK3onOqt6Td0dGshhgf_XU1f4EtMb42LU,28390
121
121
  airflow/providers/amazon/aws/operators/glue_crawler.py,sha256=6646Ru_DrGjcv_hCy5EjPXcFY6pdB0bjj6ko8Wj3XDk,5253
122
- airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=zXP0_ZoqFmjcG0REDj-e7GdZmrSvJtfvZrqg1yKSH_U,4475
122
+ airflow/providers/amazon/aws/operators/glue_databrew.py,sha256=jpribaVZqaq7rxMMhOeF5veFzGHgv184AHUhwUpsbTc,6475
123
123
  airflow/providers/amazon/aws/operators/kinesis_analytics.py,sha256=Phjx24ESi2QIszD1O6OTCV_R7Wkr04qjUsASuLzCOoM,15773
124
124
  airflow/providers/amazon/aws/operators/lambda_function.py,sha256=96KtK5KUpMPW2i8Xay1UdKPMX211hS6FqweFnRNuTFQ,10619
125
125
  airflow/providers/amazon/aws/operators/neptune.py,sha256=on5oNX5K4yHfW1POE0eeZujta71vkJdVL07vucGjX-4,14751
@@ -138,18 +138,18 @@ airflow/providers/amazon/aws/secrets/systems_manager.py,sha256=4o9x02hR3i9BIEHJs
138
138
  airflow/providers/amazon/aws/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
139
139
  airflow/providers/amazon/aws/sensors/athena.py,sha256=AYPc1Jna3Erz4lq3sc8Buc-u1CIF9N_h-Uy-HBv5q_c,3634
140
140
  airflow/providers/amazon/aws/sensors/base_aws.py,sha256=8uzQbi8sksoxTpviirFYjLr3a-SUJMNAg7d9pI_w8-s,3858
141
- airflow/providers/amazon/aws/sensors/batch.py,sha256=ilTTHIu-F3MrFDDikgfpB3dmF_i8_LU_JnIXVgv5pEk,10027
141
+ airflow/providers/amazon/aws/sensors/batch.py,sha256=LhFl7XLX-w5FsJ9GZMLoLLxMPDhO8W1kRQbeNbiq6Oc,9930
142
142
  airflow/providers/amazon/aws/sensors/bedrock.py,sha256=AMoCyKmwdRMnhcoZqMLz1XgyfoK4LyJW0m4TeXxrFZk,15695
143
143
  airflow/providers/amazon/aws/sensors/cloud_formation.py,sha256=_ZwuQbUtN1bIgNQEtKM8DkrXbYAUfEt49esEJxArmn4,4992
144
144
  airflow/providers/amazon/aws/sensors/comprehend.py,sha256=84ejUDrsrINpcxkpMo1xBOuf2_V4meWu0_wJ295QT8M,10319
145
145
  airflow/providers/amazon/aws/sensors/dms.py,sha256=EJqh2ojqFoRj4cSb2AeuWwFtHahn-h1FRvtPhd_Kspc,5338
146
146
  airflow/providers/amazon/aws/sensors/dynamodb.py,sha256=P43g73ACBoDo0Lrxzm9wOoEepbRVAOjqtwWYvIq1Tls,5008
  airflow/providers/amazon/aws/sensors/ec2.py,sha256=QHo9djFIoWKj_C2WkbfZSTogDTbXcJfeXUUCEplKRZY,3890
- airflow/providers/amazon/aws/sensors/ecs.py,sha256=utRzJBU02nMV7T8rL83DvWW0CyVHUPk1kuRTyrmIbmc,6901
+ airflow/providers/amazon/aws/sensors/ecs.py,sha256=BMQbTWJZMIYA68jOi0d-t0GN2fP2lDAvXp2K5ybMTCA,6836
  airflow/providers/amazon/aws/sensors/eks.py,sha256=AWnPm41xs20jLiV9q5gP-CysNdUgIC0T1WvS3oMJ9JA,9631
  airflow/providers/amazon/aws/sensors/emr.py,sha256=0vkIlj1toOeImrPp5uH3WKhb4aBYiDKdnXaHlU0wyZU,24846
  airflow/providers/amazon/aws/sensors/glacier.py,sha256=qCeMA6IQMDiSGRjkw87psI6f8Kp-qHsHEjZ1u_vZgrc,4051
- airflow/providers/amazon/aws/sensors/glue.py,sha256=FqM-5-N-yC5vKKnjlHyx6JDXsbToZesyx_Q9Jtb9IVA,14358
+ airflow/providers/amazon/aws/sensors/glue.py,sha256=16iCyOteauc-ffU--YaZQmTWSp1xm124GcLbK9tYH7A,14170
  airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=5Hfm_qw_Yo82s4TIOdASqwgejrK-xRqYwrvcSoJvM1Y,6051
  airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=jjvYG2hBnhadZ5vGqo16zaj_mlH2jA2HCjXOMdWkMbg,3686
  airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=aWrPRDjKuC3bbsTi_R1D8J5_NQI684F0DjpljDzDDJQ,9906
@@ -176,10 +176,10 @@ airflow/providers/amazon/aws/transfers/http_to_s3.py,sha256=J1HjIdGsd4Zl8kk-RJAX
  airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py,sha256=xYJ94xNDsadluJpUUv1fURLW7YhSgL-9GaebZ6l4RLU,4536
  airflow/providers/amazon/aws/transfers/local_to_s3.py,sha256=yp9m7aZuL6YgzYRsFcyZ1wcGTXZTMO0F0CuBfkH1eGo,4165
  airflow/providers/amazon/aws/transfers/mongo_to_s3.py,sha256=OU7Cge_0WQd7xEb38V-0hjSHbjZRCQ7Ay4xntcG9R28,6020
- airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=bw7VifyjePelD_X-hlqc_RHRz1tbCc1Cg1HI8ONjxxY,8279
+ airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=Ko-M-efK8D5AVLgLAuY3mMj_U2zq8vVu2IhJtot0uCc,8569
  airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=BRYID2nCmjooVH4WVD7tcy5MnkGQPK4c9BwioeCsIsw,11658
  airflow/providers/amazon/aws/transfers/s3_to_ftp.py,sha256=cxyNRW_FJQNNluuYr5fVluGLYnNRUvN75iHSSEHrVnY,2966
- airflow/providers/amazon/aws/transfers/s3_to_redshift.py,sha256=HVLDIG_-wftPk6YQO1S-6DOaTgRISgxaY7kG-614o40,8672
+ airflow/providers/amazon/aws/transfers/s3_to_redshift.py,sha256=VX_ZYcCH8GvzQn1OH4j40PjtcnPYd8_lsB96AWTSiMw,11064
  airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=bgHgKv7o8ueC_zkhzW5k2xZpFnMlBHMcDf0t4sQ7kHY,3488
  airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=kUuHDDR2ATDBHoRVq_3DOXGe8MVH9gcLHDMLMtFe4GI,4949
  airflow/providers/amazon/aws/transfers/salesforce_to_s3.py,sha256=yt77guCyYqVwDLdpmLb_psAI8PpGvbCjKQO6pz1J564,5686
@@ -198,7 +198,7 @@ airflow/providers/amazon/aws/triggers/eks.py,sha256=i7feJrOJgJKoToFVmtXeEoopOkJl
  airflow/providers/amazon/aws/triggers/emr.py,sha256=SYGZG4W79wQy_sH1UBd0Qau-eVg2XAv3CwqlJYayB60,18111
  airflow/providers/amazon/aws/triggers/glue.py,sha256=hv_nLzBRPG13MetjEfU_-KuTphLE-xyF6yW4uQJQuBc,9480
  airflow/providers/amazon/aws/triggers/glue_crawler.py,sha256=HtWE41ZD21C-At_bXfj0B0kuyYYklQtgXbodGgzPoF4,2863
- airflow/providers/amazon/aws/triggers/glue_databrew.py,sha256=cVFs6fBudG_w3errz9fa7n8qUlYOpo3ANHp9O1VzuXo,2375
+ airflow/providers/amazon/aws/triggers/glue_databrew.py,sha256=A_MLJgdd6N3KsiPOJjiQI5YX5qtay0-hXWjB9H223rQ,3495
  airflow/providers/amazon/aws/triggers/kinesis_analytics.py,sha256=FERA9pE2o4juRJZVlEauDcJcPkhlQ6K9Q6RHt2MZlcE,2937
  airflow/providers/amazon/aws/triggers/lambda_function.py,sha256=CWVJHvUSd1v4THrFOA59XW0AjOqfwTR87rT4tUaYaYQ,2847
  airflow/providers/amazon/aws/triggers/neptune.py,sha256=bL9W78zgDp5rASdLGm-_WO6XKne5_tcOMkbGBPmdX-8,5868
@@ -216,6 +216,7 @@ airflow/providers/amazon/aws/utils/eks_get_token.py,sha256=q4utFF2c02T2Lm6KIZLAB
  airflow/providers/amazon/aws/utils/emailer.py,sha256=y-bzg1BZzOQ8J9-ed-74LY3VMv6LrLfBDtw5S4t3Tv4,1855
  airflow/providers/amazon/aws/utils/identifiers.py,sha256=KqkEJ96mz4BYt0wuKX-_DaFk-8Lv9CuDVo-VrlAK29U,1944
  airflow/providers/amazon/aws/utils/mixins.py,sha256=aBdHNvrjSKzPczOG_lpWHjyzU3jw6RB1GyxUuukb7c4,6609
+ airflow/providers/amazon/aws/utils/openlineage.py,sha256=ooyRbKoLyjfGZYy3I6k3JxToCK07VHE5uc4f4b2ntr8,4924
  airflow/providers/amazon/aws/utils/rds.py,sha256=Qx5NEHGdzdmqNOtmAnSk1xErt9v6f-25a5Huos9bvPY,963
  airflow/providers/amazon/aws/utils/redshift.py,sha256=-Fh3kgrv_3VN6Ys5sEudtRWR9ITZqjhrDeY03hIvdkc,1957
  airflow/providers/amazon/aws/utils/sagemaker.py,sha256=893W8DBPhsyPINbFph9MKKP4O_zwptse0oUWm3XtGDE,1040
@@ -224,7 +225,7 @@ airflow/providers/amazon/aws/utils/suppress.py,sha256=5jFviuoFOJ0L3vBKI0qoCSgpVx
  airflow/providers/amazon/aws/utils/tags.py,sha256=-WPb4MpzZxV4MHS6OD09EronbR_jlfuVQeEqu4cVnj0,1762
  airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=JlZq7Nt_2Qb94S76h_zR2B_Trg4gtvrfDQG8Tm7bNgo,5325
  airflow/providers/amazon/aws/utils/waiter.py,sha256=FO1WupdK7Z9AonrC8w_XcRpQE7A-o4VlgaqQxV65dbk,3509
- airflow/providers/amazon/aws/utils/waiter_with_logging.py,sha256=Y2yKAy6v64kj4miDvC5bcK0jP8GDyWDzy-jUaI9ONMM,5892
+ airflow/providers/amazon/aws/utils/waiter_with_logging.py,sha256=7fzlo4Fc_w-1TXrnFKlnaeoPT4d_k4IK_VUWZww7jBw,6600
  airflow/providers/amazon/aws/waiters/README.md,sha256=ftfKyOH1Rqxa77DyLHkqRF1IltQty3uczLXWX7ekE0A,4535
  airflow/providers/amazon/aws/waiters/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/amazon/aws/waiters/appflow.json,sha256=aeYUa6gDxvOjDxarOUOPVjp9zp0blN2MiWxiiR0G-PE,1014
@@ -248,8 +249,8 @@ airflow/providers/amazon/aws/waiters/opensearchserverless.json,sha256=7UkPgv_tBm
  airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-yR4CQrlhfK9bAdM4,9511
  airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
  airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
- airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
- apache_airflow_providers_amazon-8.28.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
- apache_airflow_providers_amazon-8.28.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
- apache_airflow_providers_amazon-8.28.0rc1.dist-info/METADATA,sha256=bfilJwgJ2XE4Xes0UaS1egO6b3nNqyUhS8u0pIHhlfA,10844
- apache_airflow_providers_amazon-8.28.0rc1.dist-info/RECORD,,
+ airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
+ apache_airflow_providers_amazon-8.29.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+ apache_airflow_providers_amazon-8.29.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+ apache_airflow_providers_amazon-8.29.0.dist-info/METADATA,sha256=WLZea2DqkaKnL0fLEaghRDJ_PpbzAuccd1f_xeuEUXA,10826
+ apache_airflow_providers_amazon-8.29.0.dist-info/RECORD,,