apache-airflow-providers-amazon 8.27.0rc2__py3-none-any.whl → 8.28.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/hooks/batch_client.py +3 -0
  3. airflow/providers/amazon/aws/hooks/rds.py +2 -2
  4. airflow/providers/amazon/aws/operators/batch.py +8 -0
  5. airflow/providers/amazon/aws/operators/redshift_data.py +3 -3
  6. airflow/providers/amazon/aws/sensors/athena.py +2 -6
  7. airflow/providers/amazon/aws/sensors/batch.py +7 -33
  8. airflow/providers/amazon/aws/sensors/bedrock.py +1 -4
  9. airflow/providers/amazon/aws/sensors/cloud_formation.py +2 -11
  10. airflow/providers/amazon/aws/sensors/comprehend.py +1 -7
  11. airflow/providers/amazon/aws/sensors/dms.py +5 -11
  12. airflow/providers/amazon/aws/sensors/ec2.py +2 -6
  13. airflow/providers/amazon/aws/sensors/ecs.py +4 -6
  14. airflow/providers/amazon/aws/sensors/eks.py +5 -7
  15. airflow/providers/amazon/aws/sensors/emr.py +9 -36
  16. airflow/providers/amazon/aws/sensors/glacier.py +2 -6
  17. airflow/providers/amazon/aws/sensors/glue.py +0 -9
  18. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +2 -6
  19. airflow/providers/amazon/aws/sensors/glue_crawler.py +2 -6
  20. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -4
  21. airflow/providers/amazon/aws/sensors/lambda_function.py +4 -6
  22. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -4
  23. airflow/providers/amazon/aws/sensors/quicksight.py +2 -5
  24. airflow/providers/amazon/aws/sensors/redshift_cluster.py +2 -6
  25. airflow/providers/amazon/aws/sensors/s3.py +3 -14
  26. airflow/providers/amazon/aws/sensors/sagemaker.py +4 -6
  27. airflow/providers/amazon/aws/sensors/sqs.py +3 -11
  28. airflow/providers/amazon/aws/sensors/step_function.py +2 -6
  29. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +41 -10
  30. airflow/providers/amazon/aws/triggers/eks.py +2 -4
  31. airflow/providers/amazon/aws/utils/task_log_fetcher.py +12 -0
  32. airflow/providers/amazon/get_provider_info.py +3 -2
  33. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0.dist-info}/METADATA +12 -12
  34. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0.dist-info}/RECORD +36 -36
  35. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0.dist-info}/WHEEL +0 -0
  36. {apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0.dist-info}/entry_points.txt +0 -0
airflow/providers/amazon/aws/sensors/lambda_function.py

@@ -19,7 +19,7 @@ from __future__ import annotations
 
  from typing import TYPE_CHECKING, Any, Sequence
 
- from airflow.exceptions import AirflowException, AirflowSkipException
+ from airflow.exceptions import AirflowException
  from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
  from airflow.providers.amazon.aws.utils import trim_none_values
@@ -78,10 +78,8 @@ class LambdaFunctionStateSensor(AwsBaseSensor[LambdaHook]):
          state = self.hook.conn.get_function(**trim_none_values(get_function_args))["Configuration"]["State"]
 
          if state in self.FAILURE_STATES:
-             message = "Lambda function state sensor failed because the Lambda is in a failed state"
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
+             raise AirflowException(
+                 "Lambda function state sensor failed because the Lambda is in a failed state"
+             )
 
          return state in self.target_states
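
Note on the sensor changes in this release: as the removed TODO comments indicate, the per-sensor soft_fail branches were only needed for Airflow older than 2.7.1. With the provider's new apache-airflow>=2.8.0 floor (see the dependency changes further down), the skip-on-failure conversion is left to Airflow core, so the sensors now raise AirflowException directly. A minimal usage sketch, with a placeholder Lambda function name, showing that callers still just set soft_fail:

from airflow.providers.amazon.aws.sensors.lambda_function import LambdaFunctionStateSensor

# Hedged sketch: "my-function" is a placeholder. soft_fail is the standard
# BaseSensorOperator flag; when the sensor raises AirflowException, Airflow
# (>=2.7.1) marks the task as skipped instead of failed.
wait_for_lambda_active = LambdaFunctionStateSensor(
    task_id="wait_for_lambda_active",
    function_name="my-function",
    soft_fail=True,
)
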
airflow/providers/amazon/aws/sensors/opensearch_serverless.py

@@ -19,7 +19,7 @@ from __future__ import annotations
  from typing import TYPE_CHECKING, Any, Sequence
 
  from airflow.configuration import conf
- from airflow.exceptions import AirflowException, AirflowSkipException
+ from airflow.exceptions import AirflowException
  from airflow.providers.amazon.aws.hooks.opensearch_serverless import OpenSearchServerlessHook
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
  from airflow.providers.amazon.aws.triggers.opensearch_serverless import (
@@ -104,9 +104,6 @@ class OpenSearchServerlessCollectionActiveSensor(AwsBaseSensor[OpenSearchServerl
          state = self.hook.conn.batch_get_collection(**call_args)["collectionDetails"][0]["status"]
 
          if state in self.FAILURE_STATES:
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             if self.soft_fail:
-                 raise AirflowSkipException(self.FAILURE_MESSAGE)
              raise AirflowException(self.FAILURE_MESSAGE)
 
          if state in self.INTERMEDIATE_STATES:
airflow/providers/amazon/aws/sensors/quicksight.py

@@ -22,7 +22,7 @@ from typing import TYPE_CHECKING, Sequence
 
  from deprecated import deprecated
 
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 
@@ -74,10 +74,7 @@ class QuickSightSensor(AwsBaseSensor[QuickSightHook]):
          self.log.info("QuickSight Status: %s", quicksight_ingestion_state)
          if quicksight_ingestion_state in self.errored_statuses:
              error = self.hook.get_error_info(None, self.data_set_id, self.ingestion_id)
-             message = f"The QuickSight Ingestion failed. Error info: {error}"
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
+             raise AirflowException(f"The QuickSight Ingestion failed. Error info: {error}")
          return quicksight_ingestion_state == self.success_status
 
      @cached_property
airflow/providers/amazon/aws/sensors/redshift_cluster.py

@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any, Sequence
  from deprecated import deprecated
 
  from airflow.configuration import conf
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
  from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftClusterTrigger
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
@@ -93,11 +93,7 @@ class RedshiftClusterSensor(BaseSensorOperator):
 
          status = event["status"]
          if status == "error":
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             message = f"{event['status']}: {event['message']}"
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
+             raise AirflowException(f"{event['status']}: {event['message']}")
          elif status == "success":
              self.log.info("%s completed successfully.", self.task_id)
              self.log.info("Cluster Identifier %s is in %s state", self.cluster_identifier, self.target_status)
airflow/providers/amazon/aws/sensors/s3.py

@@ -33,7 +33,7 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
  if TYPE_CHECKING:
      from airflow.utils.context import Context
 
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
  from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
  from airflow.sensors.base import BaseSensorOperator, poke_mode_only
@@ -219,9 +219,6 @@ class S3KeySensor(BaseSensorOperator):
              if not found_keys:
                  self._defer()
          elif event["status"] == "error":
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             if self.soft_fail:
-                 raise AirflowSkipException(event["message"])
              raise AirflowException(event["message"])
 
      @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
@@ -342,14 +339,9 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
                  )
                  return False
 
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             message = (
-                 f"Illegal behavior: objects were deleted in"
-                 f" {os.path.join(self.bucket_name, self.prefix)} between pokes."
+             raise AirflowException(
+                 f"Illegal behavior: objects were deleted in {os.path.join(self.bucket_name, self.prefix)} between pokes."
              )
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
 
          if self.last_activity_time:
              self.inactivity_seconds = int((datetime.now() - self.last_activity_time).total_seconds())
@@ -411,8 +403,5 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
          event = validate_execute_complete_event(event)
 
          if event and event["status"] == "error":
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             if self.soft_fail:
-                 raise AirflowSkipException(event["message"])
              raise AirflowException(event["message"])
          return None
airflow/providers/amazon/aws/sensors/sagemaker.py

@@ -22,7 +22,7 @@ from typing import TYPE_CHECKING, Sequence
 
  from deprecated import deprecated
 
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
  from airflow.sensors.base import BaseSensorOperator
 
@@ -65,11 +65,9 @@ class SageMakerBaseSensor(BaseSensorOperator):
              return False
          if state in self.failed_states():
              failed_reason = self.get_failed_reason_from_response(response)
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             message = f"Sagemaker {self.resource_type} failed for the following reason: {failed_reason}"
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
+             raise AirflowException(
+                 f"Sagemaker {self.resource_type} failed for the following reason: {failed_reason}"
+             )
          return True
 
      def non_terminal_states(self) -> set[str]:
airflow/providers/amazon/aws/sensors/sqs.py

@@ -25,7 +25,7 @@ from typing import TYPE_CHECKING, Any, Collection, Sequence
  from deprecated import deprecated
 
  from airflow.configuration import conf
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.amazon.aws.hooks.sqs import SqsHook
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
  from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
@@ -160,11 +160,7 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
          event = validate_execute_complete_event(event)
 
          if event["status"] != "success":
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             message = f"Trigger error: event is {event}"
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
+             raise AirflowException(f"Trigger error: event is {event}")
          context["ti"].xcom_push(key="messages", value=event["message_batch"])
 
      def poll_sqs(self, sqs_conn: BaseAwsConnection) -> Collection:
@@ -221,11 +217,7 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
                  response = self.hook.conn.delete_message_batch(QueueUrl=self.sqs_queue, Entries=entries)
 
                  if "Successful" not in response:
-                     # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-                     error_message = f"Delete SQS Messages failed {response} for messages {messages}"
-                     if self.soft_fail:
-                         raise AirflowSkipException(error_message)
-                     raise AirflowException(error_message)
+                     raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
          if message_batch:
              context["ti"].xcom_push(key="messages", value=message_batch)
              return True
airflow/providers/amazon/aws/sensors/step_function.py

@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Sequence
 
  from deprecated import deprecated
 
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
+ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
@@ -76,11 +76,7 @@ class StepFunctionExecutionSensor(AwsBaseSensor[StepFunctionHook]):
          output = json.loads(execution_status["output"]) if "output" in execution_status else None
 
          if state in self.FAILURE_STATES:
-             # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-             message = f"Step Function sensor failed. State Machine Output: {output}"
-             if self.soft_fail:
-                 raise AirflowSkipException(message)
-             raise AirflowException(message)
+             raise AirflowException(f"Step Function sensor failed. State Machine Output: {output}")
 
          if state in self.INTERMEDIATE_STATES:
              return False
airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py

@@ -32,6 +32,7 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
  from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
  from airflow.providers.amazon.aws.transfers.base import AwsToAwsBaseOperator
+ from airflow.utils.helpers import prune_dict
 
  if TYPE_CHECKING:
      from airflow.utils.context import Context
@@ -89,10 +90,13 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
          <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan>
      :param s3_key_prefix: Prefix of s3 object key
      :param process_func: How we transform a dynamodb item to bytes. By default, we dump the json
+     :param point_in_time_export: Boolean value indicating the operator to use 'scan' or 'point in time export'
      :param export_time: Time in the past from which to export table data, counted in seconds from the start of
          the Unix epoch. The table export will be a snapshot of the table's state at this point in time.
      :param export_format: The format for the exported data. Valid values for ExportFormat are DYNAMODB_JSON
          or ION.
+     :param export_table_to_point_in_time_kwargs: extra parameters for the boto3
+         `export_table_to_point_in_time` function all. e.g. `ExportType`, `IncrementalExportSpecification`
      :param check_interval: The amount of time in seconds to wait between attempts. Only if ``export_time`` is
          provided.
      :param max_attempts: The maximum number of attempts to be made. Only if ``export_time`` is provided.
@@ -107,12 +111,14 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
          "s3_key_prefix",
          "export_time",
          "export_format",
+         "export_table_to_point_in_time_kwargs",
          "check_interval",
          "max_attempts",
      )
 
      template_fields_renderers = {
          "dynamodb_scan_kwargs": "json",
+         "export_table_to_point_in_time_kwargs": "json",
      }
 
      def __init__(
@@ -120,12 +126,14 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
          *,
          dynamodb_table_name: str,
          s3_bucket_name: str,
-         file_size: int,
+         file_size: int = 1000,
          dynamodb_scan_kwargs: dict[str, Any] | None = None,
          s3_key_prefix: str = "",
          process_func: Callable[[dict[str, Any]], bytes] = _convert_item_to_json_bytes,
+         point_in_time_export: bool = False,
          export_time: datetime | None = None,
          export_format: str = "DYNAMODB_JSON",
+         export_table_to_point_in_time_kwargs: dict | None = None,
          check_interval: int = 30,
          max_attempts: int = 60,
          **kwargs,
@@ -137,8 +145,10 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
          self.dynamodb_scan_kwargs = dynamodb_scan_kwargs
          self.s3_bucket_name = s3_bucket_name
          self.s3_key_prefix = s3_key_prefix
+         self.point_in_time_export = point_in_time_export
          self.export_time = export_time
          self.export_format = export_format
+         self.export_table_to_point_in_time_kwargs = export_table_to_point_in_time_kwargs or {}
          self.check_interval = check_interval
          self.max_attempts = max_attempts
 
@@ -148,29 +158,50 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
          return DynamoDBHook(aws_conn_id=self.source_aws_conn_id)
 
      def execute(self, context: Context) -> None:
-         if self.export_time:
+         # There are 2 separate export to point in time configuration:
+         # 1. Full export, which takes the export_time arg.
+         # 2. Incremental export, which takes the incremental_export_... args
+         # Hence export time could not be used as the proper indicator for the `_export_table_to_point_in_time`
+         # function. This change introduces a new boolean, as the indicator for whether the operator scans
+         # and export entire data or using the point in time functionality.
+         if self.point_in_time_export or self.export_time:
              self._export_table_to_point_in_time()
          else:
              self._export_entire_data()
 
      def _export_table_to_point_in_time(self):
          """
-         Export data from start of epoc till `export_time`.
+         Export data to point in time.
 
+         Full export exports data from start of epoc till `export_time`.
          Table export will be a snapshot of the table's state at this point in time.
+
+         Incremental export exports the data from a specific datetime to a specific datetime
+
+
+         Note: S3BucketOwner is a required parameter when exporting to a S3 bucket in another account.
          """
          if self.export_time and self.export_time > datetime.now(self.export_time.tzinfo):
              raise ValueError("The export_time parameter cannot be a future time.")
 
          client = self.hook.conn.meta.client
          table_description = client.describe_table(TableName=self.dynamodb_table_name)
-         response = client.export_table_to_point_in_time(
-             TableArn=table_description.get("Table", {}).get("TableArn"),
-             ExportTime=self.export_time,
-             S3Bucket=self.s3_bucket_name,
-             S3Prefix=self.s3_key_prefix,
-             ExportFormat=self.export_format,
-         )
+
+         export_table_to_point_in_time_base_args = {
+             "TableArn": table_description.get("Table", {}).get("TableArn"),
+             "ExportTime": self.export_time,
+             "S3Bucket": self.s3_bucket_name,
+             "S3Prefix": self.s3_key_prefix,
+             "ExportFormat": self.export_format,
+         }
+         export_table_to_point_in_time_args = {
+             **export_table_to_point_in_time_base_args,
+             **self.export_table_to_point_in_time_kwargs,
+         }
+
+         args_filtered = prune_dict(export_table_to_point_in_time_args)
+
+         response = client.export_table_to_point_in_time(**args_filtered)
          waiter = self.hook.get_waiter("export_table")
          export_arn = response.get("ExportDescription", {}).get("ExportArn")
          waiter.wait(
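
With the new point_in_time_export flag and the export_table_to_point_in_time_kwargs passthrough shown above, the operator can drive DynamoDB's native export instead of scanning the table. A hedged usage sketch follows; the table and bucket names are placeholders, and the nested kwargs mirror the parameters boto3 documents for export_table_to_point_in_time (incremental export), so treat the exact values as illustrative:

from datetime import datetime, timezone

from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator

# Hedged sketch: placeholder table/bucket names. The nested kwargs are passed
# through unchanged to boto3's export_table_to_point_in_time call, after
# prune_dict drops any None-valued entries.
incremental_export = DynamoDBToS3Operator(
    task_id="incremental_export",
    dynamodb_table_name="my_table",
    s3_bucket_name="my-export-bucket",
    s3_key_prefix="exports/",
    point_in_time_export=True,  # new in 8.28.0: use the export API instead of a scan
    export_table_to_point_in_time_kwargs={
        "ExportType": "INCREMENTAL_EXPORT",
        "IncrementalExportSpecification": {
            "ExportFromTime": datetime(2024, 8, 1, tzinfo=timezone.utc),
            "ExportToTime": datetime(2024, 8, 2, tzinfo=timezone.utc),
            "ExportViewType": "NEW_AND_OLD_IMAGES",
        },
    },
)
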
airflow/providers/amazon/aws/triggers/eks.py

@@ -318,13 +318,12 @@ class EksCreateNodegroupTrigger(AwsBaseWaiterTrigger):
      The trigger will asynchronously poll the boto3 API and wait for the
      nodegroup to be in the state specified by the waiter.
 
-     :param waiter_name: Name of the waiter to use, for instance 'nodegroup_active' or 'nodegroup_deleted'
      :param cluster_name: The name of the EKS cluster associated with the node group.
      :param nodegroup_name: The name of the nodegroup to check.
      :param waiter_delay: The amount of time in seconds to wait between attempts.
      :param waiter_max_attempts: The maximum number of attempts to be made.
      :param aws_conn_id: The Airflow connection used for AWS credentials.
-     :param region: Which AWS region the connection should use. (templated)
+     :param region_name: Which AWS region the connection should use. (templated)
          If this is None or empty then the default boto3 behaviour is used.
      """
 
@@ -366,13 +365,12 @@ class EksDeleteNodegroupTrigger(AwsBaseWaiterTrigger):
      The trigger will asynchronously poll the boto3 API and wait for the
      nodegroup to be in the state specified by the waiter.
 
-     :param waiter_name: Name of the waiter to use, for instance 'nodegroup_active' or 'nodegroup_deleted'
      :param cluster_name: The name of the EKS cluster associated with the node group.
      :param nodegroup_name: The name of the nodegroup to check.
      :param waiter_delay: The amount of time in seconds to wait between attempts.
      :param waiter_max_attempts: The maximum number of attempts to be made.
      :param aws_conn_id: The Airflow connection used for AWS credentials.
-     :param region: Which AWS region the connection should use. (templated)
+     :param region_name: Which AWS region the connection should use. (templated)
          If this is None or empty then the default boto3 behaviour is used.
      """
 
airflow/providers/amazon/aws/utils/task_log_fetcher.py

@@ -59,8 +59,20 @@ class AwsTaskLogFetcher(Thread):
          while not self.is_stopped():
              time.sleep(self.fetch_interval.total_seconds())
              log_events = self._get_log_events(continuation_token)
+             prev_timestamp_event = None
              for log_event in log_events:
+                 current_timestamp_event = datetime.fromtimestamp(
+                     log_event["timestamp"] / 1000.0, tz=timezone.utc
+                 )
+                 if current_timestamp_event == prev_timestamp_event:
+                     # When multiple events have the same timestamp, somehow, only one event is logged
+                     # As a consequence, some logs are missed in the log group (in case they have the same
+                     # timestamp)
+                     # When a slight delay is added before logging the event, that solves the issue
+                     # See https://github.com/apache/airflow/issues/40875
+                     time.sleep(0.1)
                  self.logger.info(self.event_to_str(log_event))
+                 prev_timestamp_event = current_timestamp_event
 
      def _get_log_events(self, skip_token: AwsLogsHook.ContinuationToken | None = None) -> Generator:
          if skip_token is None:
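
The workaround above addresses CloudWatch log events that share the same millisecond timestamp being collapsed when forwarded to the task log (see the issue linked in the added comments). A standalone sketch of the same pattern, assuming events is any iterable of CloudWatch log events with a millisecond timestamp field and emit is a logging callable:

import time
from datetime import datetime, timezone

def forward_events(events, emit):
    # Sketch of the AwsTaskLogFetcher workaround: pause briefly whenever two
    # consecutive events carry the same timestamp so neither is dropped.
    prev_ts = None
    for event in events:
        current_ts = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
        if current_ts == prev_ts:
            time.sleep(0.1)
        emit(str(event))
        prev_ts = current_ts
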
airflow/providers/amazon/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
          "name": "Amazon",
          "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
          "state": "ready",
-         "source-date-epoch": 1722145222,
+         "source-date-epoch": 1723968989,
          "versions": [
+             "8.28.0",
              "8.27.0",
              "8.26.0",
              "8.25.0",
@@ -94,7 +95,7 @@ def get_provider_info():
              "1.0.0",
          ],
          "dependencies": [
-             "apache-airflow>=2.7.0",
+             "apache-airflow>=2.8.0",
              "apache-airflow-providers-common-compat>=1.1.0",
              "apache-airflow-providers-common-sql>=1.3.1",
              "apache-airflow-providers-http",
{apache_airflow_providers_amazon-8.27.0rc2.dist-info → apache_airflow_providers_amazon-8.28.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-amazon
- Version: 8.27.0rc2
+ Version: 8.28.0
  Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
  Keywords: airflow-provider,amazon,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,11 +22,11 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
  Requires-Dist: PyAthena>=3.0.10
- Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
- Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
- Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
+ Requires-Dist: apache-airflow-providers-common-compat>=1.1.0
+ Requires-Dist: apache-airflow-providers-common-compat>=1.1.0
+ Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
  Requires-Dist: apache-airflow-providers-http
- Requires-Dist: apache-airflow>=2.7.0rc0
+ Requires-Dist: apache-airflow>=2.8.0
  Requires-Dist: asgiref>=2.3.0
  Requires-Dist: boto3>=1.34.90
  Requires-Dist: botocore>=1.34.90
@@ -38,7 +38,7 @@ Requires-Dist: sqlalchemy_redshift>=0.8.6
  Requires-Dist: watchtower>=3.0.0,<4
  Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
  Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
- Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
+ Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
  Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
  Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
  Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
@@ -56,8 +56,8 @@ Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
  Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
  Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -125,7 +125,7 @@ Provides-Extra: ssh
 
  Package ``apache-airflow-providers-amazon``
 
- Release: ``8.27.0.rc2``
+ Release: ``8.28.0``
 
 
  Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -138,7 +138,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
  are in ``airflow.providers.amazon`` python package.
 
  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/>`_.
 
  Installation
  ------------
@@ -155,7 +155,7 @@ Requirements
  ========================================== ==================
  PIP package                                Version required
  ========================================== ==================
- ``apache-airflow``                         ``>=2.7.0``
+ ``apache-airflow``                         ``>=2.8.0``
  ``apache-airflow-providers-common-compat`` ``>=1.1.0``
  ``apache-airflow-providers-common-sql``    ``>=1.3.1``
  ``apache-airflow-providers-http``
@@ -205,4 +205,4 @@ Dependent package
  ====================================================================================================================== ===================
 
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.27.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.28.0/changelog.html>`_.