apache-airflow-providers-amazon 8.27.0rc2__py3-none-any.whl → 8.28.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/hooks/batch_client.py +3 -0
  3. airflow/providers/amazon/aws/hooks/rds.py +2 -2
  4. airflow/providers/amazon/aws/operators/batch.py +8 -0
  5. airflow/providers/amazon/aws/operators/redshift_data.py +3 -3
  6. airflow/providers/amazon/aws/sensors/athena.py +2 -6
  7. airflow/providers/amazon/aws/sensors/batch.py +7 -33
  8. airflow/providers/amazon/aws/sensors/bedrock.py +1 -4
  9. airflow/providers/amazon/aws/sensors/cloud_formation.py +2 -11
  10. airflow/providers/amazon/aws/sensors/comprehend.py +1 -7
  11. airflow/providers/amazon/aws/sensors/dms.py +5 -11
  12. airflow/providers/amazon/aws/sensors/ec2.py +2 -6
  13. airflow/providers/amazon/aws/sensors/ecs.py +4 -6
  14. airflow/providers/amazon/aws/sensors/eks.py +5 -7
  15. airflow/providers/amazon/aws/sensors/emr.py +9 -36
  16. airflow/providers/amazon/aws/sensors/glacier.py +2 -6
  17. airflow/providers/amazon/aws/sensors/glue.py +0 -9
  18. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +2 -6
  19. airflow/providers/amazon/aws/sensors/glue_crawler.py +2 -6
  20. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -4
  21. airflow/providers/amazon/aws/sensors/lambda_function.py +4 -6
  22. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -4
  23. airflow/providers/amazon/aws/sensors/quicksight.py +2 -5
  24. airflow/providers/amazon/aws/sensors/redshift_cluster.py +2 -6
  25. airflow/providers/amazon/aws/sensors/s3.py +3 -14
  26. airflow/providers/amazon/aws/sensors/sagemaker.py +4 -6
  27. airflow/providers/amazon/aws/sensors/sqs.py +3 -11
  28. airflow/providers/amazon/aws/sensors/step_function.py +2 -6
  29. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +41 -10
  30. airflow/providers/amazon/aws/triggers/eks.py +2 -4
  31. airflow/providers/amazon/aws/utils/task_log_fetcher.py +12 -0
  32. airflow/providers/amazon/get_provider_info.py +3 -2
  33. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0rc1.dist-info}/METADATA +8 -8
  34. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0rc1.dist-info}/RECORD +36 -36
  35. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0rc1.dist-info}/WHEEL +0 -0
  36. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/sensors/lambda_function.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any, Sequence
 
-from airflow.exceptions import AirflowException, AirflowSkipException
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.utils import trim_none_values
@@ -78,10 +78,8 @@ class LambdaFunctionStateSensor(AwsBaseSensor[LambdaHook]):
         state = self.hook.conn.get_function(**trim_none_values(get_function_args))["Configuration"]["State"]
 
         if state in self.FAILURE_STATES:
-            message = "Lambda function state sensor failed because the Lambda is in a failed state"
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
+            raise AirflowException(
+                "Lambda function state sensor failed because the Lambda is in a failed state"
+            )
 
         return state in self.target_states
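
The change above is representative of most sensor edits in this release: the per-sensor `soft_fail` branches existed only for Airflow versions older than 2.7.1, which did not yet translate poke-time failures into skips on their own, as the removed TODO comments note. With this release raising the provider's floor to apache-airflow>=2.8.0 (see the dependency changes further down), every sensor can simply raise AirflowException. A minimal sketch of the simplified pattern; the class and its _fetch_state() helper are illustrative, not part of the provider:

    from airflow.exceptions import AirflowException

    class ExampleStateSensor:  # stands in for any of the patched sensors
        FAILURE_STATES = ("Failed",)
        TARGET_STATES = ("Active",)

        def poke(self, context) -> bool:
            state = self._fetch_state()  # hypothetical helper that queries the AWS API
            if state in self.FAILURE_STATES:
                # On Airflow >= 2.7.1 the base sensor machinery handles soft_fail
                # centrally, so no per-sensor AirflowSkipException branch is needed.
                raise AirflowException("sensor target entered a failed state")
            return state in self.TARGET_STATES
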
airflow/providers/amazon/aws/sensors/opensearch_serverless.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowSkipException
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.opensearch_serverless import OpenSearchServerlessHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.opensearch_serverless import (
@@ -104,9 +104,6 @@ class OpenSearchServerlessCollectionActiveSensor(AwsBaseSensor[OpenSearchServerl
         state = self.hook.conn.batch_get_collection(**call_args)["collectionDetails"][0]["status"]
 
         if state in self.FAILURE_STATES:
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            if self.soft_fail:
-                raise AirflowSkipException(self.FAILURE_MESSAGE)
             raise AirflowException(self.FAILURE_MESSAGE)
 
         if state in self.INTERMEDIATE_STATES:
airflow/providers/amazon/aws/sensors/quicksight.py
@@ -22,7 +22,7 @@ from typing import TYPE_CHECKING, Sequence
 
 from deprecated import deprecated
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 
@@ -74,10 +74,7 @@ class QuickSightSensor(AwsBaseSensor[QuickSightHook]):
         self.log.info("QuickSight Status: %s", quicksight_ingestion_state)
         if quicksight_ingestion_state in self.errored_statuses:
             error = self.hook.get_error_info(None, self.data_set_id, self.ingestion_id)
-            message = f"The QuickSight Ingestion failed. Error info: {error}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
+            raise AirflowException(f"The QuickSight Ingestion failed. Error info: {error}")
         return quicksight_ingestion_state == self.success_status
 
     @cached_property
airflow/providers/amazon/aws/sensors/redshift_cluster.py
@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any, Sequence
 from deprecated import deprecated
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
 from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftClusterTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
@@ -93,11 +93,7 @@ class RedshiftClusterSensor(BaseSensorOperator):
 
         status = event["status"]
         if status == "error":
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            message = f"{event['status']}: {event['message']}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
+            raise AirflowException(f"{event['status']}: {event['message']}")
         elif status == "success":
             self.log.info("%s completed successfully.", self.task_id)
             self.log.info("Cluster Identifier %s is in %s state", self.cluster_identifier, self.target_status)
airflow/providers/amazon/aws/sensors/s3.py
@@ -33,7 +33,7 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
 from airflow.sensors.base import BaseSensorOperator, poke_mode_only
@@ -219,9 +219,6 @@ class S3KeySensor(BaseSensorOperator):
         if not found_keys:
             self._defer()
         elif event["status"] == "error":
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            if self.soft_fail:
-                raise AirflowSkipException(event["message"])
             raise AirflowException(event["message"])
 
     @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
@@ -342,14 +339,9 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
                 )
                 return False
 
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            message = (
-                f"Illegal behavior: objects were deleted in"
-                f" {os.path.join(self.bucket_name, self.prefix)} between pokes."
+            raise AirflowException(
+                f"Illegal behavior: objects were deleted in {os.path.join(self.bucket_name, self.prefix)} between pokes."
             )
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
 
         if self.last_activity_time:
             self.inactivity_seconds = int((datetime.now() - self.last_activity_time).total_seconds())
@@ -411,8 +403,5 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
         event = validate_execute_complete_event(event)
 
         if event and event["status"] == "error":
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            if self.soft_fail:
-                raise AirflowSkipException(event["message"])
             raise AirflowException(event["message"])
         return None
airflow/providers/amazon/aws/sensors/sagemaker.py
@@ -22,7 +22,7 @@ from typing import TYPE_CHECKING, Sequence
 
 from deprecated import deprecated
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
 from airflow.sensors.base import BaseSensorOperator
 
@@ -65,11 +65,9 @@ class SageMakerBaseSensor(BaseSensorOperator):
             return False
         if state in self.failed_states():
             failed_reason = self.get_failed_reason_from_response(response)
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            message = f"Sagemaker {self.resource_type} failed for the following reason: {failed_reason}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
+            raise AirflowException(
+                f"Sagemaker {self.resource_type} failed for the following reason: {failed_reason}"
+            )
         return True
 
     def non_terminal_states(self) -> set[str]:
airflow/providers/amazon/aws/sensors/sqs.py
@@ -25,7 +25,7 @@ from typing import TYPE_CHECKING, Any, Collection, Sequence
 from deprecated import deprecated
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
@@ -160,11 +160,7 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
         event = validate_execute_complete_event(event)
 
         if event["status"] != "success":
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            message = f"Trigger error: event is {event}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
+            raise AirflowException(f"Trigger error: event is {event}")
         context["ti"].xcom_push(key="messages", value=event["message_batch"])
 
     def poll_sqs(self, sqs_conn: BaseAwsConnection) -> Collection:
@@ -221,11 +217,7 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
             response = self.hook.conn.delete_message_batch(QueueUrl=self.sqs_queue, Entries=entries)
 
             if "Successful" not in response:
-                # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-                error_message = f"Delete SQS Messages failed {response} for messages {messages}"
-                if self.soft_fail:
-                    raise AirflowSkipException(error_message)
-                raise AirflowException(error_message)
+                raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
         if message_batch:
             context["ti"].xcom_push(key="messages", value=message_batch)
         return True
airflow/providers/amazon/aws/sensors/step_function.py
@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Sequence
 
 from deprecated import deprecated
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
@@ -76,11 +76,7 @@ class StepFunctionExecutionSensor(AwsBaseSensor[StepFunctionHook]):
         output = json.loads(execution_status["output"]) if "output" in execution_status else None
 
         if state in self.FAILURE_STATES:
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            message = f"Step Function sensor failed. State Machine Output: {output}"
-            if self.soft_fail:
-                raise AirflowSkipException(message)
-            raise AirflowException(message)
+            raise AirflowException(f"Step Function sensor failed. State Machine Output: {output}")
 
         if state in self.INTERMEDIATE_STATES:
             return False
airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -32,6 +32,7 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.transfers.base import AwsToAwsBaseOperator
+from airflow.utils.helpers import prune_dict
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -89,10 +90,13 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
         <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan>
     :param s3_key_prefix: Prefix of s3 object key
     :param process_func: How we transform a dynamodb item to bytes. By default, we dump the json
+    :param point_in_time_export: Boolean value indicating whether the operator uses 'scan' or 'point in time export'
     :param export_time: Time in the past from which to export table data, counted in seconds from the start of
         the Unix epoch. The table export will be a snapshot of the table's state at this point in time.
     :param export_format: The format for the exported data. Valid values for ExportFormat are DYNAMODB_JSON
         or ION.
+    :param export_table_to_point_in_time_kwargs: extra parameters for the boto3
+        `export_table_to_point_in_time` function call, e.g. `ExportType`, `IncrementalExportSpecification`
     :param check_interval: The amount of time in seconds to wait between attempts. Only if ``export_time`` is
         provided.
     :param max_attempts: The maximum number of attempts to be made. Only if ``export_time`` is provided.
@@ -107,12 +111,14 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
         "s3_key_prefix",
         "export_time",
         "export_format",
+        "export_table_to_point_in_time_kwargs",
         "check_interval",
         "max_attempts",
     )
 
     template_fields_renderers = {
         "dynamodb_scan_kwargs": "json",
+        "export_table_to_point_in_time_kwargs": "json",
     }
 
     def __init__(
@@ -120,12 +126,14 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
         *,
         dynamodb_table_name: str,
         s3_bucket_name: str,
-        file_size: int,
+        file_size: int = 1000,
         dynamodb_scan_kwargs: dict[str, Any] | None = None,
         s3_key_prefix: str = "",
         process_func: Callable[[dict[str, Any]], bytes] = _convert_item_to_json_bytes,
+        point_in_time_export: bool = False,
         export_time: datetime | None = None,
         export_format: str = "DYNAMODB_JSON",
+        export_table_to_point_in_time_kwargs: dict | None = None,
         check_interval: int = 30,
         max_attempts: int = 60,
         **kwargs,
@@ -137,8 +145,10 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
         self.dynamodb_scan_kwargs = dynamodb_scan_kwargs
         self.s3_bucket_name = s3_bucket_name
         self.s3_key_prefix = s3_key_prefix
+        self.point_in_time_export = point_in_time_export
         self.export_time = export_time
         self.export_format = export_format
+        self.export_table_to_point_in_time_kwargs = export_table_to_point_in_time_kwargs or {}
         self.check_interval = check_interval
         self.max_attempts = max_attempts
 
@@ -148,29 +158,50 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
         return DynamoDBHook(aws_conn_id=self.source_aws_conn_id)
 
     def execute(self, context: Context) -> None:
-        if self.export_time:
+        # There are 2 separate export-to-point-in-time configurations:
+        # 1. Full export, which takes the export_time arg.
+        # 2. Incremental export, which takes the incremental_export_... args.
+        # Hence export_time cannot serve as the indicator for the `_export_table_to_point_in_time`
+        # function. This change introduces a new boolean as the indicator for whether the operator
+        # scans and exports the entire table or uses the point-in-time functionality.
+        if self.point_in_time_export or self.export_time:
             self._export_table_to_point_in_time()
         else:
             self._export_entire_data()
 
     def _export_table_to_point_in_time(self):
         """
-        Export data from start of epoch till `export_time`.
+        Export data to point in time.
 
+        Full export exports data from start of epoch till `export_time`.
         Table export will be a snapshot of the table's state at this point in time.
+
+        Incremental export exports the data from one specific datetime to another.
+
+        Note: S3BucketOwner is a required parameter when exporting to an S3 bucket in another account.
         """
         if self.export_time and self.export_time > datetime.now(self.export_time.tzinfo):
             raise ValueError("The export_time parameter cannot be a future time.")
 
         client = self.hook.conn.meta.client
         table_description = client.describe_table(TableName=self.dynamodb_table_name)
-        response = client.export_table_to_point_in_time(
-            TableArn=table_description.get("Table", {}).get("TableArn"),
-            ExportTime=self.export_time,
-            S3Bucket=self.s3_bucket_name,
-            S3Prefix=self.s3_key_prefix,
-            ExportFormat=self.export_format,
-        )
+
+        export_table_to_point_in_time_base_args = {
+            "TableArn": table_description.get("Table", {}).get("TableArn"),
+            "ExportTime": self.export_time,
+            "S3Bucket": self.s3_bucket_name,
+            "S3Prefix": self.s3_key_prefix,
+            "ExportFormat": self.export_format,
+        }
+        export_table_to_point_in_time_args = {
+            **export_table_to_point_in_time_base_args,
+            **self.export_table_to_point_in_time_kwargs,
+        }
+
+        args_filtered = prune_dict(export_table_to_point_in_time_args)
+
+        response = client.export_table_to_point_in_time(**args_filtered)
         waiter = self.hook.get_waiter("export_table")
         export_arn = response.get("ExportDescription", {}).get("ExportArn")
         waiter.wait(
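
Putting the new operator arguments together, a hedged usage sketch (task id, table, bucket, and export window are illustrative values; ExportType and IncrementalExportSpecification are the boto3 parameters named in the docstring above):

    from datetime import datetime, timezone

    from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator

    # Incremental point-in-time export: the kwargs are merged over the base
    # arguments and prune_dict drops None-valued keys such as ExportTime,
    # which stays unset for incremental exports.
    incremental_export = DynamoDBToS3Operator(
        task_id="dynamodb_incremental_export",
        dynamodb_table_name="my-table",
        s3_bucket_name="my-export-bucket",
        point_in_time_export=True,
        export_table_to_point_in_time_kwargs={
            "ExportType": "INCREMENTAL_EXPORT",
            "IncrementalExportSpecification": {
                "ExportFromTime": datetime(2024, 8, 1, tzinfo=timezone.utc),
                "ExportToTime": datetime(2024, 8, 2, tzinfo=timezone.utc),
                "ExportViewType": "NEW_AND_OLD_IMAGES",
            },
        },
    )
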
airflow/providers/amazon/aws/triggers/eks.py
@@ -318,13 +318,12 @@ class EksCreateNodegroupTrigger(AwsBaseWaiterTrigger):
     The trigger will asynchronously poll the boto3 API and wait for the
     nodegroup to be in the state specified by the waiter.
 
-    :param waiter_name: Name of the waiter to use, for instance 'nodegroup_active' or 'nodegroup_deleted'
     :param cluster_name: The name of the EKS cluster associated with the node group.
     :param nodegroup_name: The name of the nodegroup to check.
     :param waiter_delay: The amount of time in seconds to wait between attempts.
     :param waiter_max_attempts: The maximum number of attempts to be made.
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-    :param region: Which AWS region the connection should use. (templated)
+    :param region_name: Which AWS region the connection should use. (templated)
         If this is None or empty then the default boto3 behaviour is used.
     """
 
@@ -366,13 +365,12 @@ class EksDeleteNodegroupTrigger(AwsBaseWaiterTrigger):
     The trigger will asynchronously poll the boto3 API and wait for the
     nodegroup to be in the state specified by the waiter.
 
-    :param waiter_name: Name of the waiter to use, for instance 'nodegroup_active' or 'nodegroup_deleted'
     :param cluster_name: The name of the EKS cluster associated with the node group.
     :param nodegroup_name: The name of the nodegroup to check.
     :param waiter_delay: The amount of time in seconds to wait between attempts.
     :param waiter_max_attempts: The maximum number of attempts to be made.
     :param aws_conn_id: The Airflow connection used for AWS credentials.
-    :param region: Which AWS region the connection should use. (templated)
+    :param region_name: Which AWS region the connection should use. (templated)
         If this is None or empty then the default boto3 behaviour is used.
     """
 
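Assuming the trigger constructors mirror the documented parameters above (the signature is not shown in this diff), a minimal instantiation sketch with illustrative values:

    from airflow.providers.amazon.aws.triggers.eks import EksCreateNodegroupTrigger

    trigger = EksCreateNodegroupTrigger(
        cluster_name="my-cluster",
        nodegroup_name="my-nodegroup",
        waiter_delay=30,        # seconds between polls
        waiter_max_attempts=80,
        aws_conn_id="aws_default",
        region_name=None,       # None/empty falls back to default boto3 region resolution
    )
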
airflow/providers/amazon/aws/utils/task_log_fetcher.py
@@ -59,8 +59,20 @@ class AwsTaskLogFetcher(Thread):
         while not self.is_stopped():
             time.sleep(self.fetch_interval.total_seconds())
             log_events = self._get_log_events(continuation_token)
+            prev_timestamp_event = None
             for log_event in log_events:
+                current_timestamp_event = datetime.fromtimestamp(
+                    log_event["timestamp"] / 1000.0, tz=timezone.utc
+                )
+                if current_timestamp_event == prev_timestamp_event:
+                    # When multiple events have the same timestamp, somehow, only one event is logged.
+                    # As a consequence, some logs are missed in the log group (in case they have
+                    # the same timestamp).
+                    # When a slight delay is added before logging the event, that solves the issue.
+                    # See https://github.com/apache/airflow/issues/40875
+                    time.sleep(0.1)
                 self.logger.info(self.event_to_str(log_event))
+                prev_timestamp_event = current_timestamp_event
 
     def _get_log_events(self, skip_token: AwsLogsHook.ContinuationToken | None = None) -> Generator:
         if skip_token is None:
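
CloudWatch Logs reports event timestamps as integer milliseconds since the Unix epoch, hence the division by 1000.0 before comparing consecutive events. A quick standalone sketch of that conversion (the event payload is illustrative):

    from datetime import datetime, timezone

    # CloudWatch Logs events carry millisecond-precision epoch timestamps.
    event = {"timestamp": 1723968989000, "message": "example log line"}
    ts = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
    print(ts)  # 2024-08-18 08:16:29+00:00
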
airflow/providers/amazon/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1722145222,
+        "source-date-epoch": 1723968989,
         "versions": [
+            "8.28.0",
             "8.27.0",
             "8.26.0",
             "8.25.0",
@@ -94,7 +95,7 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.7.0",
+            "apache-airflow>=2.8.0",
             "apache-airflow-providers-common-compat>=1.1.0",
             "apache-airflow-providers-common-sql>=1.3.1",
             "apache-airflow-providers-http",
{apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.27.0rc2
+Version: 8.28.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -26,7 +26,7 @@ Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
 Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
 Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.7.0rc0
+Requires-Dist: apache-airflow>=2.8.0rc0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
@@ -56,8 +56,8 @@ Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -125,7 +125,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.27.0.rc2``
+Release: ``8.28.0.rc1``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -138,7 +138,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/>`_.
 
 Installation
 ------------
@@ -155,7 +155,7 @@ Requirements
 ========================================== ==================
 PIP package                                Version required
 ========================================== ==================
-``apache-airflow``                         ``>=2.7.0``
+``apache-airflow``                         ``>=2.8.0``
 ``apache-airflow-providers-common-compat`` ``>=1.1.0``
 ``apache-airflow-providers-common-sql``    ``>=1.3.1``
 ``apache-airflow-providers-http``
@@ -205,4 +205,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/changelog.html>`_.
{apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0rc1.dist-info}/RECORD
@@ -1,6 +1,6 @@
 airflow/providers/amazon/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/amazon/__init__.py,sha256=IuFQba9UmD4C80gY5BOgm7vj1RlCc4TS7DkotYsOKpE,1494
-airflow/providers/amazon/get_provider_info.py,sha256=fXag10VZkktZ_HewHJTuRMrH24hduCTNO-rDG2bLz14,68566
+airflow/providers/amazon/__init__.py,sha256=Q81gd1BpNYVrDMTImImWYFH9aNzRG1HIDcotMYnzAfE,1494
+airflow/providers/amazon/get_provider_info.py,sha256=ooAm-teOdxdYJAv29APRVll6FTxWKDO4iwMaD9C2ezc,68588
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
 airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -43,7 +43,7 @@ airflow/providers/amazon/aws/hooks/appflow.py,sha256=-le6RsIMWIqTav7KGknsph9Td42
 airflow/providers/amazon/aws/hooks/athena.py,sha256=gkIBOFgSDD_UJsdN45EPzQPJyZ2Z22RgdxQ65Q8dfig,13644
 airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=vFIUbMMTem3xvYAUTvW3h1ypjpKVLNck3VbrAlupVLA,6844
 airflow/providers/amazon/aws/hooks/base_aws.py,sha256=SjPVRuJ8cgM4wLIAl2wXY6hEFaqGUL_WxcpRn3P2GIE,50037
-airflow/providers/amazon/aws/hooks/batch_client.py,sha256=yIsCXL_5ezf5zp36jDnbcialXITi-Y52JbrhYieELSY,21550
+airflow/providers/amazon/aws/hooks/batch_client.py,sha256=w4pIsSfHyGz2rRH95cIxCHocqXwxjpYVDT9Tf3sqUso,21669
 airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
 airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=VlAf3NYyGsfmOw9S4Ods8VKua3mBWSLHEAr8hHCHdmY,10579
 airflow/providers/amazon/aws/hooks/bedrock.py,sha256=TZHEBOcDyeGSx-QLL8LydiytHEMdUETji_4emJfv4Ng,3343
@@ -72,7 +72,7 @@ airflow/providers/amazon/aws/hooks/logs.py,sha256=Rt55traQesi358YqL9Xk2cWyYzGEKX
 airflow/providers/amazon/aws/hooks/neptune.py,sha256=a3r26msR8U5oCTMHQYqA-2OspVOxsXL2HTBDFbWZY3I,4704
 airflow/providers/amazon/aws/hooks/opensearch_serverless.py,sha256=0zFRXXjlbQRCTt5D_q1FCp965FC8LyOhMRk2x6nvsIc,1543
 airflow/providers/amazon/aws/hooks/quicksight.py,sha256=MFTlrWV88wLky2swo-b5fFQDLbMQCw6w6lcgAvJqveU,7957
-airflow/providers/amazon/aws/hooks/rds.py,sha256=2pCdiZ7dQiLJ6WiBBIYtxfiBzeGeI3sAEt2GRxhbszM,15161
+airflow/providers/amazon/aws/hooks/rds.py,sha256=h7NF3GZ42RKeh70rlg2BQFVpa8vNadS37slj0MsAT3w,15211
 airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=NA9HDPkTwSVVwE1BN1WCym_HQ2OQcsDBXFjqxQixKhg,13119
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=oYlUoRBE45XRQxkGCnXBv707iBk1QieOQfeZ3bn81Rw,10253
 airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=7Iqr_5IVREdsW9mdTZEm2_Ng13eIBvkqZKS-LtTMM-k,11318
@@ -105,7 +105,7 @@ airflow/providers/amazon/aws/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39k
 airflow/providers/amazon/aws/operators/appflow.py,sha256=PoHw62T_lD4burfRzXPJsjAHAwXqmb3vKKk4YK0y-jk,21226
 airflow/providers/amazon/aws/operators/athena.py,sha256=n18FbhkOGSHPHBXqcK5z9xEGA26bfKHr8tC4cu20JKY,14657
 airflow/providers/amazon/aws/operators/base_aws.py,sha256=cdc5GZkl_YGDDtlV9CVsdbTH3j7bza6d3RrDm93seOo,3864
-airflow/providers/amazon/aws/operators/batch.py,sha256=zhoraA4orru-Xh-AZnGth1XM-xtglC3iuFXYi7feTuY,22759
+airflow/providers/amazon/aws/operators/batch.py,sha256=qmqY_pDX_H6ZNQ0Uc8kUScxGBscv69Q87TOGJ6YTAEY,23251
 airflow/providers/amazon/aws/operators/bedrock.py,sha256=PJcsRoTht4w23XG8W1B4Rl6BEJiomQLnwTcuPfMM3pI,40140
 airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=-WMYq-oA8WpPN2i5aTgBenFj9-CjbeEcy9NuRCnSwpM,5066
 airflow/providers/amazon/aws/operators/comprehend.py,sha256=JL0UfGpAekOeRFx3IT32u3fWhMhCwTyziA_OWB6xgjk,15954
@@ -126,7 +126,7 @@ airflow/providers/amazon/aws/operators/neptune.py,sha256=on5oNX5K4yHfW1POE0eeZuj
 airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswRzaHaK8h0ACLY99i_1Prtq10,4089
 airflow/providers/amazon/aws/operators/rds.py,sha256=zGiUIwpO2EdUByCYkgFwLbFCQhg_TCTTHVuNnee6X_g,39325
 airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=rmBHCssxrYEJ8EnENY-AnzC004lbtHvxXHpy69sHtV0,36681
-airflow/providers/amazon/aws/operators/redshift_data.py,sha256=wK-vTDcn0MqOuF9e-71JYIEkLKihah6oGU-p_8VT2HI,8612
+airflow/providers/amazon/aws/operators/redshift_data.py,sha256=2RRVhUntrTrsG0Fc9eNyyjdu2KF9saofGVY2qQpDOcE,8607
 airflow/providers/amazon/aws/operators/s3.py,sha256=8mzkCBGnAbb5CHf0PFokQwdEmio_D3u99JnNoqTBUW4,35844
 airflow/providers/amazon/aws/operators/sagemaker.py,sha256=0ZP6vyPHMOVkco80sViyDtwhguawSJFo5l1GLOvunQc,83614
 airflow/providers/amazon/aws/operators/sns.py,sha256=Rttd015UhLo4pCplGybxtLhflyu_26IFzYP7WTmQFk8,3730
@@ -136,36 +136,36 @@ airflow/providers/amazon/aws/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOF
 airflow/providers/amazon/aws/secrets/secrets_manager.py,sha256=aCS2TGp0aS0_O8J-PkxXhHGb-4SQ259b5n9LZOZ4YAg,15541
 airflow/providers/amazon/aws/secrets/systems_manager.py,sha256=4o9x02hR3i9BIEHJsElLMxDFPhA7MTgt-t-3Fg3Aqtg,8496
 airflow/providers/amazon/aws/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/sensors/athena.py,sha256=CS7sTcQrMW6VVxaUhOY0pIm8382_KpkCBmXl5fCCMk4,3863
+airflow/providers/amazon/aws/sensors/athena.py,sha256=AYPc1Jna3Erz4lq3sc8Buc-u1CIF9N_h-Uy-HBv5q_c,3634
 airflow/providers/amazon/aws/sensors/base_aws.py,sha256=8uzQbi8sksoxTpviirFYjLr3a-SUJMNAg7d9pI_w8-s,3858
-airflow/providers/amazon/aws/sensors/batch.py,sha256=_--bT2Wh59x4C3W9Xu4h7xrC--bCOPn29YzbsB6TRzw,11487
-airflow/providers/amazon/aws/sensors/bedrock.py,sha256=fq8xeYWUapJZmAM0OWYeD9nMNJitvZNY8D9vfu2gcSQ,15907
-airflow/providers/amazon/aws/sensors/cloud_formation.py,sha256=kLINQol-ZFpjOpSBFQ7I4JXZkBjEICT-g8MT-gXscLw,5426
-airflow/providers/amazon/aws/sensors/comprehend.py,sha256=pbxEo0MYgvXdmtgWMAfHU0HOKD4_3ixPGcZErrXJi4I,10721
-airflow/providers/amazon/aws/sensors/dms.py,sha256=SzKUgEtL0agejWKWJvjqmrJdtwbd2vJriHcCsYV0j8s,5744
+airflow/providers/amazon/aws/sensors/batch.py,sha256=ilTTHIu-F3MrFDDikgfpB3dmF_i8_LU_JnIXVgv5pEk,10027
+airflow/providers/amazon/aws/sensors/bedrock.py,sha256=AMoCyKmwdRMnhcoZqMLz1XgyfoK4LyJW0m4TeXxrFZk,15695
+airflow/providers/amazon/aws/sensors/cloud_formation.py,sha256=_ZwuQbUtN1bIgNQEtKM8DkrXbYAUfEt49esEJxArmn4,4992
+airflow/providers/amazon/aws/sensors/comprehend.py,sha256=84ejUDrsrINpcxkpMo1xBOuf2_V4meWu0_wJ295QT8M,10319
+airflow/providers/amazon/aws/sensors/dms.py,sha256=EJqh2ojqFoRj4cSb2AeuWwFtHahn-h1FRvtPhd_Kspc,5338
 airflow/providers/amazon/aws/sensors/dynamodb.py,sha256=P43g73ACBoDo0Lrxzm9wOoEepbRVAOjqtwWYvIq1Tls,5008
-airflow/providers/amazon/aws/sensors/ec2.py,sha256=GpEVRZEl4oW4eB4ION8H2nfAHp-MGptwav7DpqL3Uo0,4119
-airflow/providers/amazon/aws/sensors/ecs.py,sha256=Bju2xJHNI8SdddD1muDcqtihL__EAHQwa-RtYxPjfoI,7087
-airflow/providers/amazon/aws/sensors/eks.py,sha256=TeSQ__B3eUYfdkr4a8x0oT0JUl_8JdiLs3p0p92YRlo,9831
-airflow/providers/amazon/aws/sensors/emr.py,sha256=Ayo-nzYKP7UbI1wUx05WQFAkEtdqjsMkEHunmyoUE8Q,26341
-airflow/providers/amazon/aws/sensors/glacier.py,sha256=2UUI-y-x07DH8I5OikA_d5_FHCMQjBpxMKxRvlZSlS4,4282
-airflow/providers/amazon/aws/sensors/glue.py,sha256=acyKgd4W77xkT2j2-QmT74oH42n3lXsm_8D1sWD-eF8,14931
-airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=-D9XyNkc5bEEEa0UFjxv_XeyCAHON0Gm_XmMchMZauc,6280
-airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=nRFhSajoj4VbyL7YZt8lM6obUECegGUqN_nDPB_KQcQ,3931
-airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=f6pfYU5mLj-peZgKOoVGEDpTnGgB5m80NAGEDGs49bY,10118
-airflow/providers/amazon/aws/sensors/lambda_function.py,sha256=jlrhImfJw8yBnI-FVDKZZ53uUGntgVo8FKeW_u8IX-0,3459
-airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=o2OSYxWLP12248kQ-Z-OCEjxzTjUusIoFzj0ZOXhCjA,5693
-airflow/providers/amazon/aws/sensors/quicksight.py,sha256=_jw5455fWYAttuLl63uDmzt9EYU1FjaRvXtG_S_1CUE,4625
+airflow/providers/amazon/aws/sensors/ec2.py,sha256=QHo9djFIoWKj_C2WkbfZSTogDTbXcJfeXUUCEplKRZY,3890
+airflow/providers/amazon/aws/sensors/ecs.py,sha256=utRzJBU02nMV7T8rL83DvWW0CyVHUPk1kuRTyrmIbmc,6901
+airflow/providers/amazon/aws/sensors/eks.py,sha256=AWnPm41xs20jLiV9q5gP-CysNdUgIC0T1WvS3oMJ9JA,9631
+airflow/providers/amazon/aws/sensors/emr.py,sha256=0vkIlj1toOeImrPp5uH3WKhb4aBYiDKdnXaHlU0wyZU,24846
+airflow/providers/amazon/aws/sensors/glacier.py,sha256=qCeMA6IQMDiSGRjkw87psI6f8Kp-qHsHEjZ1u_vZgrc,4051
+airflow/providers/amazon/aws/sensors/glue.py,sha256=FqM-5-N-yC5vKKnjlHyx6JDXsbToZesyx_Q9Jtb9IVA,14358
+airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=5Hfm_qw_Yo82s4TIOdASqwgejrK-xRqYwrvcSoJvM1Y,6051
+airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=jjvYG2hBnhadZ5vGqo16zaj_mlH2jA2HCjXOMdWkMbg,3686
+airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=aWrPRDjKuC3bbsTi_R1D8J5_NQI684F0DjpljDzDDJQ,9906
+airflow/providers/amazon/aws/sensors/lambda_function.py,sha256=vwqC2QBTIWe0ae1vy2OqRxCjB4KftrmMcgEVbS1B6jE,3260
+airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=TuERrY9AMK9Oyvxb75amsq9gB2BtsKcJKrtQ6eaOin4,5481
+airflow/providers/amazon/aws/sensors/quicksight.py,sha256=nOcUiM9wTqud2Jt7i4oC76_AJYOmqNifa8zFv7etINs,4490
 airflow/providers/amazon/aws/sensors/rds.py,sha256=AB2dH7fLwAaQogj0NYRrOOftfeOk_INetsyVHr1_qfM,6476
-airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=4A2Fq-_mERIdNKMM8_kss1zC3C4CItRuqCdZCRRKcGo,4533
-airflow/providers/amazon/aws/sensors/s3.py,sha256=CEzAumhalpNegZ8UCHDRqDMUF4dv19RxuuUqGXjQKY0,18201
-airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=Pd8S0hExbaqdqOKglQAi51EggJEMxHRn_sZu-QWqsts,12984
-airflow/providers/amazon/aws/sensors/sqs.py,sha256=GFzHT5nFSyIMATqwqjhEmOWZfwdOcAe4T6yUFNUlvWk,11329
-airflow/providers/amazon/aws/sensors/step_function.py,sha256=pqAtBJd3m003qvaJwr4BrKBHhYWGrJ67yaqczjcE1_w,4089
+airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=nqn1cuUafG6jZ6dPEOkA8vExreBoRb_lR4gQAU3cPpI,4304
+airflow/providers/amazon/aws/sensors/s3.py,sha256=B-nahpVb4IcRvcwcCtgts2pzTKUBgsTszwOYznHWuwQ,17578
+airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=Iu0TbybsDoxGZYj77KIctperUd7BvFbBiZ1AOCUMvLg,12785
+airflow/providers/amazon/aws/sensors/sqs.py,sha256=L6CIvLuKOfTkZSEuoYQRZOiG_wNjhO9GAk1x_oX8ptM,10843
+airflow/providers/amazon/aws/sensors/step_function.py,sha256=H1Rqm-iZMrnWJggXam1uiGbWNMvKrGGgNIbDL2A_kzo,3860
 airflow/providers/amazon/aws/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py,sha256=NtfuaKa5Jp5rTafp71xoTaOR52jrfmhYeZKGEppJJ3U,7221
 airflow/providers/amazon/aws/transfers/base.py,sha256=LMIgzvDgCMMojkhoJCSJh4egcpUZ9V2FoX8yMF3xeOk,2987
-airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py,sha256=sX8bbKt1M4LtKncNUmcIAaIB_BEDAAGNP9qwFAoiM4E,8328
+airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py,sha256=nBEavKOyxu7AQi4K-sgnHIQlCSdqbAw-1j5F7GNFEnA,10144
 airflow/providers/amazon/aws/transfers/exasol_to_s3.py,sha256=kevKzRidFKgibSKSz5-blkxHDV0isc8euU08iHn83z0,4418
 airflow/providers/amazon/aws/transfers/ftp_to_s3.py,sha256=eSE5nlx8wmUCJtgOgVtCRBaZSswFaHLCXhPy7_9hYhQ,6366
 airflow/providers/amazon/aws/transfers/gcs_to_s3.py,sha256=PGjM6abAWwD8lgnG5A1bSls5tmI3Wd12aZe_7QjjGNU,10365
@@ -194,7 +194,7 @@ airflow/providers/amazon/aws/triggers/bedrock.py,sha256=IiKyl0UUax-ex4siLjZpQGDZ
 airflow/providers/amazon/aws/triggers/comprehend.py,sha256=atK02t-G6e-Rgd-a-IHc4n-wGZ3oC4pKueOwNeaLCrI,4063
 airflow/providers/amazon/aws/triggers/ec2.py,sha256=gMY3EP4TmL6SodLw12FNSLttlHd7hRhOu-q3CiG7y2w,3245
 airflow/providers/amazon/aws/triggers/ecs.py,sha256=NFgFR9rkj5wiLN4X9t5itHf4ayOD7eoiWbnG7W13N8A,9202
-airflow/providers/amazon/aws/triggers/eks.py,sha256=t69h3uIgeQTG3d7n7faOJyMPfsbL6OtkmCTUrpq_jqI,16980
+airflow/providers/amazon/aws/triggers/eks.py,sha256=i7feJrOJgJKoToFVmtXeEoopOkJlw79T9ZRmG-vyCyQ,16778
 airflow/providers/amazon/aws/triggers/emr.py,sha256=SYGZG4W79wQy_sH1UBd0Qau-eVg2XAv3CwqlJYayB60,18111
 airflow/providers/amazon/aws/triggers/glue.py,sha256=hv_nLzBRPG13MetjEfU_-KuTphLE-xyF6yW4uQJQuBc,9480
 airflow/providers/amazon/aws/triggers/glue_crawler.py,sha256=HtWE41ZD21C-At_bXfj0B0kuyYYklQtgXbodGgzPoF4,2863
@@ -222,7 +222,7 @@ airflow/providers/amazon/aws/utils/sagemaker.py,sha256=893W8DBPhsyPINbFph9MKKP4O
 airflow/providers/amazon/aws/utils/sqs.py,sha256=s97MhAX-6pWdxkrpFfknaIDvL2QzYr411J9l4pL_no8,3493
 airflow/providers/amazon/aws/utils/suppress.py,sha256=5jFviuoFOJ0L3vBKI0qoCSgpVxMxUMgAeXPsQ1Iyq80,2360
 airflow/providers/amazon/aws/utils/tags.py,sha256=-WPb4MpzZxV4MHS6OD09EronbR_jlfuVQeEqu4cVnj0,1762
-airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=bOF0WriEfnCUu0jpeZXfSPW_couD3aefiOTTJF5apsQ,4552
+airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=JlZq7Nt_2Qb94S76h_zR2B_Trg4gtvrfDQG8Tm7bNgo,5325
 airflow/providers/amazon/aws/utils/waiter.py,sha256=FO1WupdK7Z9AonrC8w_XcRpQE7A-o4VlgaqQxV65dbk,3509
 airflow/providers/amazon/aws/utils/waiter_with_logging.py,sha256=Y2yKAy6v64kj4miDvC5bcK0jP8GDyWDzy-jUaI9ONMM,5892
 airflow/providers/amazon/aws/waiters/README.md,sha256=ftfKyOH1Rqxa77DyLHkqRF1IltQty3uczLXWX7ekE0A,4535
@@ -249,7 +249,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.27.0rc2.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-8.27.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_amazon-8.27.0rc2.dist-info/METADATA,sha256=AZjFZHwmOK2Qdc3jrmyBgvDHz-U1KKlKQ1HCDUbOo2A,10844
-apache_airflow_providers_amazon-8.27.0rc2.dist-info/RECORD,,
+apache_airflow_providers_amazon-8.28.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.28.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.28.0rc1.dist-info/METADATA,sha256=bfilJwgJ2XE4Xes0UaS1egO6b3nNqyUhS8u0pIHhlfA,10844
+apache_airflow_providers_amazon-8.28.0rc1.dist-info/RECORD,,