apache-airflow-providers-amazon 9.5.0rc2__py3-none-any.whl → 9.5.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +12 -14
  2. airflow/providers/amazon/aws/auth_manager/router/login.py +3 -1
  3. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -1
  4. airflow/providers/amazon/aws/hooks/athena_sql.py +2 -2
  5. airflow/providers/amazon/aws/hooks/batch_client.py +1 -2
  6. airflow/providers/amazon/aws/hooks/batch_waiters.py +11 -3
  7. airflow/providers/amazon/aws/hooks/dms.py +3 -1
  8. airflow/providers/amazon/aws/hooks/redshift_cluster.py +9 -9
  9. airflow/providers/amazon/aws/hooks/redshift_data.py +1 -2
  10. airflow/providers/amazon/aws/hooks/sagemaker.py +1 -1
  11. airflow/providers/amazon/aws/links/athena.py +1 -2
  12. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +174 -54
  13. airflow/providers/amazon/aws/log/s3_task_handler.py +123 -86
  14. airflow/providers/amazon/aws/operators/ec2.py +1 -1
  15. airflow/providers/amazon/aws/operators/eks.py +3 -3
  16. airflow/providers/amazon/aws/operators/redshift_cluster.py +10 -3
  17. airflow/providers/amazon/aws/operators/sagemaker.py +3 -5
  18. airflow/providers/amazon/aws/sensors/glacier.py +1 -1
  19. airflow/providers/amazon/aws/sensors/mwaa.py +2 -1
  20. airflow/providers/amazon/aws/sensors/s3.py +1 -1
  21. airflow/providers/amazon/aws/sensors/step_function.py +2 -1
  22. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +2 -2
  23. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  24. airflow/providers/amazon/get_provider_info.py +2 -2
  25. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.5.0rc3.dist-info}/METADATA +1 -1
  26. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.5.0rc3.dist-info}/RECORD +28 -28
  27. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.5.0rc3.dist-info}/WHEEL +0 -0
  28. {apache_airflow_providers_amazon-9.5.0rc2.dist-info → apache_airflow_providers_amazon-9.5.0rc3.dist-info}/entry_points.txt +0 -0
@@ -27,15 +27,6 @@ from fastapi import FastAPI
 
 from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
 from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager
-from airflow.api_fastapi.auth.managers.models.resource_details import (
-    AccessView,
-    BackfillDetails,
-    ConnectionDetails,
-    DagAccessEntity,
-    DagDetails,
-    PoolDetails,
-    VariableDetails,
-)
 from airflow.cli.cli_config import CLICommand, DefaultHelpParser, GroupCommand
 from airflow.configuration import conf
 from airflow.exceptions import AirflowOptionalProviderFeatureException
@@ -59,9 +50,16 @@ if TYPE_CHECKING:
         IsAuthorizedVariableRequest,
     )
     from airflow.api_fastapi.auth.managers.models.resource_details import (
+        AccessView,
         AssetAliasDetails,
         AssetDetails,
+        BackfillDetails,
         ConfigurationDetails,
+        ConnectionDetails,
+        DagAccessEntity,
+        DagDetails,
+        PoolDetails,
+        VariableDetails,
     )
     from airflow.api_fastapi.common.types import MenuItem
 
@@ -256,7 +254,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.CONNECTION,
-                "entity_id": cast(ConnectionDetails, request["details"]).conn_id
+                "entity_id": cast("ConnectionDetails", request["details"]).conn_id
                 if request.get("details")
                 else None,
             }
@@ -274,10 +272,10 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.DAG,
-                "entity_id": cast(DagDetails, request["details"]).id if request.get("details") else None,
+                "entity_id": cast("DagDetails", request["details"]).id if request.get("details") else None,
                 "context": {
                     "dag_entity": {
-                        "string": cast(DagAccessEntity, request["access_entity"]).value,
+                        "string": cast("DagAccessEntity", request["access_entity"]).value,
                     },
                 }
                 if request.get("access_entity")
@@ -297,7 +295,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.POOL,
-                "entity_id": cast(PoolDetails, request["details"]).name if request.get("details") else None,
+                "entity_id": cast("PoolDetails", request["details"]).name if request.get("details") else None,
             }
             for request in requests
         ]
@@ -313,7 +311,7 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
             {
                 "method": request["method"],
                 "entity_type": AvpEntities.VARIABLE,
-                "entity_id": cast(VariableDetails, request["details"]).key
+                "entity_id": cast("VariableDetails", request["details"]).key
                 if request.get("details")
                 else None,
             }
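
Note: the auth-manager hunks above all follow one pattern: the resource-detail classes are now imported only under TYPE_CHECKING, so each runtime cast() has to name its target as a string. A minimal, self-contained sketch of that pattern (illustrative only, not provider code; Decimal stands in for the detail classes):

    # Sketch of the cast("X", ...) pattern used above.
    from __future__ import annotations

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Visible to type checkers only; never imported at runtime.
        from decimal import Decimal


    def as_decimal(value: object) -> Decimal:
        # The string target keeps the runtime free of the import;
        # cast() performs no conversion or validation, it only informs the type checker.
        return cast("Decimal", value)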
@@ -83,7 +83,9 @@ def login_callback(request: Request):
     url = conf.get("api", "base_url")
     token = get_auth_manager().generate_jwt(user)
     response = RedirectResponse(url=url, status_code=303)
-    response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=True)
+
+    secure = conf.has_option("api", "ssl_cert")
+    response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure)
     return response
 
 
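The login callback now derives the cookie's secure flag from configuration instead of hard-coding True. A small illustrative sketch of the same decision, with a hypothetical stand-in for Airflow's conf object:

    # Illustrative only: FakeConf is a hypothetical stand-in for airflow.configuration.conf.
    from __future__ import annotations

    from dataclasses import dataclass, field


    @dataclass
    class FakeConf:
        """Hypothetical stand-in for the real configuration object."""

        options: dict[str, str] = field(default_factory=dict)

        def has_option(self, section: str, key: str) -> bool:
            return f"{section}.{key}" in self.options


    def cookie_secure_flag(conf: FakeConf) -> bool:
        # Mark the JWT cookie Secure only when the API is served over TLS,
        # approximated (as in the diff) by the presence of api.ssl_cert.
        return conf.has_option("api", "ssl_cert")


    assert cookie_secure_flag(FakeConf({"api.ssl_cert": "/etc/ssl/api.pem"})) is True
    assert cookie_secure_flag(FakeConf()) is False

The practical effect is that plain-HTTP development setups still receive the cookie, while TLS deployments keep the Secure attribute.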
@@ -278,7 +278,7 @@ class AwsEcsExecutor(BaseExecutor):
         if not has_exit_codes:
             return ""
         reasons = [
-            f'{container["container_arn"]} - {container["reason"]}'
+            f"{container['container_arn']} - {container['reason']}"
             for container in containers
             if "reason" in container
         ]
@@ -146,10 +146,10 @@ class AthenaSQLHook(AwsBaseHook, DbApiHook):
         creds = self.get_credentials(region_name=conn_params["region_name"])
 
         return URL.create(
-            f'awsathena+{conn_params["driver"]}',
+            f"awsathena+{conn_params['driver']}",
             username=creds.access_key,
             password=creds.secret_key,
-            host=f'athena.{conn_params["region_name"]}.{conn_params["aws_domain"]}',
+            host=f"athena.{conn_params['region_name']}.{conn_params['aws_domain']}",
             port=443,
             database=conn_params["schema_name"],
             query={"aws_session_token": creds.token, **self.conn.extra_dejson},
@@ -416,8 +416,7 @@ class BatchClientHook(AwsBaseHook):
                 )
             else:
                 raise AirflowException(
-                    f"AWS Batch job ({job_id}) description error: exceeded status_retries "
-                    f"({self.status_retries})"
+                    f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
                 )
 
     @staticmethod
@@ -30,7 +30,7 @@ import json
 import sys
 from copy import deepcopy
 from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Any, Callable
 
 import botocore.client
 import botocore.exceptions
@@ -144,7 +144,12 @@ class BatchWaitersHook(BatchClientHook):
         return self._waiter_model
 
     def get_waiter(
-        self, waiter_name: str, _: dict[str, str] | None = None, deferrable: bool = False, client=None
+        self,
+        waiter_name: str,
+        parameters: dict[str, str] | None = None,
+        config_overrides: dict[str, Any] | None = None,
+        deferrable: bool = False,
+        client=None,
     ) -> botocore.waiter.Waiter:
         """
         Get an AWS Batch service waiter, using the configured ``.waiter_model``.
@@ -175,7 +180,10 @@
             the name (including the casing) of the key name in the waiter
             model file (typically this is CamelCasing); see ``.list_waiters``.
 
-        :param _: unused, just here to match the method signature in base_aws
+        :param parameters: unused, just here to match the method signature in base_aws
+        :param config_overrides: unused, just here to match the method signature in base_aws
+        :param deferrable: unused, just here to match the method signature in base_aws
+        :param client: unused, just here to match the method signature in base_aws
 
         :return: a waiter object for the named AWS Batch service
         """
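BatchWaitersHook.get_waiter now spells out parameters, config_overrides, deferrable, and client so its signature lines up with the base AWS hook, even though only waiter_name is used. A toy sketch (unrelated to the real classes) of why mirroring the base signature matters for keyword callers:

    # Toy classes, not the provider's; shows the signature-compatibility point only.
    from __future__ import annotations

    from typing import Any


    class BaseHook:
        def get_waiter(
            self,
            waiter_name: str,
            parameters: dict[str, str] | None = None,
            config_overrides: dict[str, Any] | None = None,
            deferrable: bool = False,
            client=None,
        ) -> str:
            return f"service waiter {waiter_name}"


    class ModelFileHook(BaseHook):
        def get_waiter(
            self,
            waiter_name: str,
            parameters: dict[str, str] | None = None,  # accepted but unused, as in the diff
            config_overrides: dict[str, Any] | None = None,
            deferrable: bool = False,
            client=None,
        ) -> str:
            return f"waiter-model waiter {waiter_name}"


    # Callers that pass the extra arguments by keyword work against either class.
    for hook in (BaseHook(), ModelFileHook()):
        print(hook.get_waiter("JobComplete", config_overrides={"Delay": 5}))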
@@ -292,7 +292,9 @@ class DmsHook(AwsBaseHook):
             return arn
 
         except ClientError as err:
-            err_str = f"Error: {err.get('Error','').get('Code','')}: {err.get('Error','').get('Message','')}"
+            err_str = (
+                f"Error: {err.get('Error', '').get('Code', '')}: {err.get('Error', '').get('Message', '')}"
+            )
             self.log.error("Error while creating replication config: %s", err_str)
             raise err
 
@@ -67,7 +67,7 @@ class RedshiftHook(AwsBaseHook):
             for the cluster that is being created.
         :param params: Remaining AWS Create cluster API params.
         """
-        response = self.get_conn().create_cluster(
+        response = self.conn.create_cluster(
             ClusterIdentifier=cluster_identifier,
             NodeType=node_type,
             MasterUsername=master_username,
@@ -87,9 +87,9 @@
         :param cluster_identifier: unique identifier of a cluster
         """
         try:
-            response = self.get_conn().describe_clusters(ClusterIdentifier=cluster_identifier)["Clusters"]
+            response = self.conn.describe_clusters(ClusterIdentifier=cluster_identifier)["Clusters"]
             return response[0]["ClusterStatus"] if response else None
-        except self.get_conn().exceptions.ClusterNotFoundFault:
+        except self.conn.exceptions.ClusterNotFoundFault:
             return "cluster_not_found"
 
     async def cluster_status_async(self, cluster_identifier: str) -> str:
@@ -115,7 +115,7 @@
         """
         final_cluster_snapshot_identifier = final_cluster_snapshot_identifier or ""
 
-        response = self.get_conn().delete_cluster(
+        response = self.conn.delete_cluster(
             ClusterIdentifier=cluster_identifier,
             SkipFinalClusterSnapshot=skip_final_cluster_snapshot,
             FinalClusterSnapshotIdentifier=final_cluster_snapshot_identifier,
@@ -131,7 +131,7 @@
 
         :param cluster_identifier: unique identifier of a cluster
         """
-        response = self.get_conn().describe_cluster_snapshots(ClusterIdentifier=cluster_identifier)
+        response = self.conn.describe_cluster_snapshots(ClusterIdentifier=cluster_identifier)
         if "Snapshots" not in response:
             return None
         snapshots = response["Snapshots"]
@@ -149,7 +149,7 @@
         :param cluster_identifier: unique identifier of a cluster
         :param snapshot_identifier: unique identifier for a snapshot of a cluster
         """
-        response = self.get_conn().restore_from_cluster_snapshot(
+        response = self.conn.restore_from_cluster_snapshot(
             ClusterIdentifier=cluster_identifier, SnapshotIdentifier=snapshot_identifier
         )
         return response["Cluster"] if response["Cluster"] else None
@@ -175,7 +175,7 @@
         """
         if tags is None:
             tags = []
-        response = self.get_conn().create_cluster_snapshot(
+        response = self.conn.create_cluster_snapshot(
             SnapshotIdentifier=snapshot_identifier,
             ClusterIdentifier=cluster_identifier,
             ManualSnapshotRetentionPeriod=retention_period,
@@ -192,11 +192,11 @@
         :param snapshot_identifier: A unique identifier for the snapshot that you are requesting
         """
         try:
-            response = self.get_conn().describe_cluster_snapshots(
+            response = self.conn.describe_cluster_snapshots(
                 SnapshotIdentifier=snapshot_identifier,
             )
             snapshot = response.get("Snapshots")[0]
             snapshot_status: str = snapshot.get("Status")
             return snapshot_status
-        except self.get_conn().exceptions.ClusterSnapshotNotFoundFault:
+        except self.conn.exceptions.ClusterSnapshotNotFoundFault:
             return None
@@ -186,8 +186,7 @@ class RedshiftDataHook(AwsGenericHook["RedshiftDataAPIServiceClient"]):
                 RedshiftDataQueryFailedError if status == FAILED_STATE else RedshiftDataQueryAbortedError
             )
             raise exception_cls(
-                f"Statement {resp['Id']} terminated with status {status}. "
-                f"Response details: {pformat(resp)}"
+                f"Statement {resp['Id']} terminated with status {status}. Response details: {pformat(resp)}"
             )
 
         self.log.info("Query status: %s", status)
@@ -131,7 +131,7 @@ def secondary_training_status_message(
     status_strs = []
     for transition in transitions_to_print:
         message = transition["StatusMessage"]
-        time_utc = timezone.convert_to_utc(cast(datetime, job_description["LastModifiedTime"]))
+        time_utc = timezone.convert_to_utc(cast("datetime", job_description["LastModifiedTime"]))
         status_strs.append(f"{time_utc:%Y-%m-%d %H:%M:%S} {transition['Status']} - {message}")
 
     return "\n".join(status_strs)
@@ -25,6 +25,5 @@ class AthenaQueryResultsLink(BaseAwsLink):
     name = "Query Results"
     key = "_athena_query_results"
     format_str = (
-        BASE_AWS_CONSOLE_LINK + "/athena/home?region={region_name}#"
-        "/query-editor/history/{query_execution_id}"
+        BASE_AWS_CONSOLE_LINK + "/athena/home?region={region_name}#/query-editor/history/{query_execution_id}"
     )
@@ -17,20 +17,31 @@
17
17
  # under the License.
18
18
  from __future__ import annotations
19
19
 
20
+ import copy
21
+ import json
22
+ import logging
23
+ import os
20
24
  from datetime import date, datetime, timedelta, timezone
21
25
  from functools import cached_property
26
+ from pathlib import Path
22
27
  from typing import TYPE_CHECKING, Any
23
28
 
29
+ import attrs
24
30
  import watchtower
25
31
 
26
32
  from airflow.configuration import conf
27
33
  from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
28
34
  from airflow.providers.amazon.aws.utils import datetime_to_epoch_utc_ms
35
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
29
36
  from airflow.utils.log.file_task_handler import FileTaskHandler
30
37
  from airflow.utils.log.logging_mixin import LoggingMixin
31
38
 
32
39
  if TYPE_CHECKING:
33
- from airflow.models import TaskInstance
40
+ import structlog.typing
41
+
42
+ from airflow.models.taskinstance import TaskInstance
43
+ from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
44
+ from airflow.utils.log.file_task_handler import LogMessages, LogSourceInfo
34
45
 
35
46
 
36
47
  def json_serialize_legacy(value: Any) -> str | None:
@@ -62,6 +73,155 @@ def json_serialize(value: Any) -> str | None:
62
73
  return watchtower._json_serialize_default(value)
63
74
 
64
75
 
76
+ @attrs.define(kw_only=True)
77
+ class CloudWatchRemoteLogIO(LoggingMixin): # noqa: D101
78
+ base_log_folder: Path = attrs.field(converter=Path)
79
+ remote_base: str = ""
80
+ delete_local_copy: bool = True
81
+
82
+ log_group_arn: str
83
+ log_stream_name: str = ""
84
+ log_group: str = attrs.field(init=False, repr=False)
85
+ region_name: str = attrs.field(init=False, repr=False)
86
+
87
+ @log_group.default
88
+ def _(self):
89
+ return self.log_group_arn.split(":")[6]
90
+
91
+ @region_name.default
92
+ def _(self):
93
+ return self.log_group_arn.split(":")[3]
94
+
95
+ @cached_property
96
+ def hook(self):
97
+ """Returns AwsLogsHook."""
98
+ return AwsLogsHook(
99
+ aws_conn_id=conf.get("logging", "remote_log_conn_id"), region_name=self.region_name
100
+ )
101
+
102
+ @cached_property
103
+ def handler(self) -> watchtower.CloudWatchLogHandler:
104
+ _json_serialize = conf.getimport("aws", "cloudwatch_task_handler_json_serializer", fallback=None)
105
+ return watchtower.CloudWatchLogHandler(
106
+ log_group_name=self.log_group,
107
+ log_stream_name=self.log_stream_name,
108
+ use_queues=True,
109
+ boto3_client=self.hook.get_conn(),
110
+ json_serialize_default=_json_serialize or json_serialize_legacy,
111
+ )
112
+
113
+ @cached_property
114
+ def processors(self) -> tuple[structlog.typing.Processor, ...]:
115
+ from logging import getLogRecordFactory
116
+
117
+ import structlog.stdlib
118
+
119
+ logRecordFactory = getLogRecordFactory()
120
+ # The handler MUST be initted here, before the processor is actually used to log anything.
121
+ # Otherwise, logging that occurs during the creation of the handler can create infinite loops.
122
+ _handler = self.handler
123
+ from airflow.sdk.log import relative_path_from_logger
124
+
125
+ def proc(logger: structlog.typing.WrappedLogger, method_name: str, event: structlog.typing.EventDict):
126
+ if not logger or not (stream_name := relative_path_from_logger(logger)):
127
+ return event
128
+ # Only init the handler stream_name once. We cannot do it above when we init the handler because
129
+ # we don't yet know the log path at that point.
130
+ if not _handler.log_stream_name:
131
+ _handler.log_stream_name = stream_name.as_posix().replace(":", "_")
132
+ name = event.get("logger_name") or event.get("logger", "")
133
+ level = structlog.stdlib.NAME_TO_LEVEL.get(method_name.lower(), logging.INFO)
134
+ msg = copy.copy(event)
135
+ created = None
136
+ if ts := msg.pop("timestamp", None):
137
+ try:
138
+ created = datetime.fromisoformat(ts)
139
+ except Exception:
140
+ pass
141
+ record = logRecordFactory(
142
+ name, level, pathname="", lineno=0, msg=msg, args=(), exc_info=None, func=None, sinfo=None
143
+ )
144
+ if created is not None:
145
+ ct = created.timestamp()
146
+ record.created = ct
147
+ record.msecs = int((ct - int(ct)) * 1000) + 0.0 # Copied from stdlib logging
148
+ _handler.handle(record)
149
+ return event
150
+
151
+ return (proc,)
152
+
153
+ def close(self):
154
+ self.handler.close()
155
+
156
+ def upload(self, path: os.PathLike | str, ti: RuntimeTI):
157
+ # No-op, as we upload via the processor as we go
158
+ # But we need to give the handler time to finish off its business
159
+ self.close()
160
+ return
161
+
162
+ def read(self, relative_path, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages | None]:
163
+ logs: LogMessages | None = []
164
+ messages = [
165
+ f"Reading remote log from Cloudwatch log_group: {self.log_group} log_stream: {relative_path}"
166
+ ]
167
+ try:
168
+ if AIRFLOW_V_3_0_PLUS:
169
+ from airflow.utils.log.file_task_handler import StructuredLogMessage
170
+
171
+ logs = [
172
+ StructuredLogMessage.model_validate(log)
173
+ for log in self.get_cloudwatch_logs(relative_path, ti)
174
+ ]
175
+ else:
176
+ logs = [self.get_cloudwatch_logs(relative_path, ti)] # type: ignore[arg-value]
177
+ except Exception as e:
178
+ logs = None
179
+ messages.append(str(e))
180
+
181
+ return messages, logs
182
+
183
+ def get_cloudwatch_logs(self, stream_name: str, task_instance: RuntimeTI):
184
+ """
185
+ Return all logs from the given log stream.
186
+
187
+ :param stream_name: name of the Cloudwatch log stream to get all logs from
188
+ :param task_instance: the task instance to get logs about
189
+ :return: string of all logs from the given log stream
190
+ """
191
+ stream_name = stream_name.replace(":", "_")
192
+ # If there is an end_date to the task instance, fetch logs until that date + 30 seconds
193
+ # 30 seconds is an arbitrary buffer so that we don't miss any logs that were emitted
194
+ end_time = (
195
+ None
196
+ if (end_date := getattr(task_instance, "end_date", None)) is None
197
+ else datetime_to_epoch_utc_ms(end_date + timedelta(seconds=30))
198
+ )
199
+ events = self.hook.get_log_events(
200
+ log_group=self.log_group,
201
+ log_stream_name=stream_name,
202
+ end_time=end_time,
203
+ )
204
+ if AIRFLOW_V_3_0_PLUS:
205
+ return list(self._event_to_dict(e) for e in events)
206
+ return "\n".join(self._event_to_str(event) for event in events)
207
+
208
+ def _event_to_dict(self, event: dict) -> dict:
209
+ event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc).isoformat()
210
+ message = event["message"]
211
+ try:
212
+ message = json.loads(message)
213
+ message["timestamp"] = event_dt
214
+ return message
215
+ except Exception:
216
+ return {"timestamp": event_dt, "event": message}
217
+
218
+ def _event_to_str(self, event: dict) -> str:
219
+ event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
220
+ formatted_event_dt = event_dt.strftime("%Y-%m-%d %H:%M:%S,%f")[:-3]
221
+ message = event["message"]
222
+ return f"[{formatted_event_dt}] {message}"
223
+
224
+
65
225
  class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
66
226
  """
67
227
  CloudwatchTaskHandler is a python log handler that handles and reads task instance logs.
@@ -84,6 +244,11 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
84
244
  self.region_name = split_arn[3]
85
245
  self.closed = False
86
246
 
247
+ self.io = CloudWatchRemoteLogIO(
248
+ base_log_folder=base_log_folder,
249
+ log_group_arn=log_group_arn,
250
+ )
251
+
87
252
  @cached_property
88
253
  def hook(self):
89
254
  """Returns AwsLogsHook."""
@@ -97,14 +262,9 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
97
262
 
98
263
  def set_context(self, ti: TaskInstance, *, identifier: str | None = None):
99
264
  super().set_context(ti)
100
- _json_serialize = conf.getimport("aws", "cloudwatch_task_handler_json_serializer", fallback=None)
101
- self.handler = watchtower.CloudWatchLogHandler(
102
- log_group_name=self.log_group,
103
- log_stream_name=self._render_filename(ti, ti.try_number),
104
- use_queues=not getattr(ti, "is_trigger_log_context", False),
105
- boto3_client=self.hook.get_conn(),
106
- json_serialize_default=_json_serialize or json_serialize_legacy,
107
- )
265
+ self.io.log_stream_name = self._render_filename(ti, ti.try_number)
266
+
267
+ self.handler = self.io.handler
108
268
 
109
269
  def close(self):
110
270
  """Close the handler responsible for the upload of the local log file to Cloudwatch."""
@@ -120,49 +280,9 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
120
280
  # Mark closed so we don't double write if close is called twice
121
281
  self.closed = True
122
282
 
123
- def _read(self, task_instance, try_number, metadata=None):
283
+ def _read_remote_logs(
284
+ self, task_instance, try_number, metadata=None
285
+ ) -> tuple[LogSourceInfo, LogMessages]:
124
286
  stream_name = self._render_filename(task_instance, try_number)
125
- try:
126
- return (
127
- f"*** Reading remote log from Cloudwatch log_group: {self.log_group} "
128
- f"log_stream: {stream_name}.\n"
129
- f"{self.get_cloudwatch_logs(stream_name=stream_name, task_instance=task_instance)}\n",
130
- {"end_of_log": True},
131
- )
132
- except Exception as e:
133
- log = (
134
- f"*** Unable to read remote logs from Cloudwatch (log_group: {self.log_group}, log_stream: "
135
- f"{stream_name})\n*** {e}\n\n"
136
- )
137
- self.log.error(log)
138
- local_log, metadata = super()._read(task_instance, try_number, metadata)
139
- log += local_log
140
- return log, metadata
141
-
142
- def get_cloudwatch_logs(self, stream_name: str, task_instance: TaskInstance) -> str:
143
- """
144
- Return all logs from the given log stream.
145
-
146
- :param stream_name: name of the Cloudwatch log stream to get all logs from
147
- :param task_instance: the task instance to get logs about
148
- :return: string of all logs from the given log stream
149
- """
150
- # If there is an end_date to the task instance, fetch logs until that date + 30 seconds
151
- # 30 seconds is an arbitrary buffer so that we don't miss any logs that were emitted
152
- end_time = (
153
- None
154
- if task_instance.end_date is None
155
- else datetime_to_epoch_utc_ms(task_instance.end_date + timedelta(seconds=30))
156
- )
157
- events = self.hook.get_log_events(
158
- log_group=self.log_group,
159
- log_stream_name=stream_name,
160
- end_time=end_time,
161
- )
162
- return "\n".join(self._event_to_str(event) for event in events)
163
-
164
- def _event_to_str(self, event: dict) -> str:
165
- event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
166
- formatted_event_dt = event_dt.strftime("%Y-%m-%d %H:%M:%S,%f")[:-3]
167
- message = event["message"]
168
- return f"[{formatted_event_dt}] {message}"
287
+ messages, logs = self.io.read(stream_name, task_instance)
288
+ return messages, logs or []
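
Note: on Airflow 3 the new CloudWatchRemoteLogIO returns structured log records by converting each CloudWatch event into a dict with an ISO-8601 timestamp, falling back to a wrapped plain-text entry when the message is not JSON. A standalone sketch of that conversion (it mirrors _event_to_dict above; it is not the provider class itself):

    # Standalone sketch mirroring _event_to_dict from the diff above.
    import json
    from datetime import datetime, timezone


    def event_to_dict(event: dict) -> dict:
        """Turn one CloudWatch Logs event into a structured record with an ISO timestamp."""
        event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc).isoformat()
        try:
            message = json.loads(event["message"])
            message["timestamp"] = event_dt
            return message
        except Exception:
            # Non-JSON lines are wrapped rather than parsed.
            return {"timestamp": event_dt, "event": event["message"]}


    print(event_to_dict({"timestamp": 1743477760000, "message": '{"event": "task started"}'}))
    print(event_to_dict({"timestamp": 1743477760000, "message": "plain text line"}))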
@@ -24,6 +24,8 @@ import shutil
24
24
  from functools import cached_property
25
25
  from typing import TYPE_CHECKING
26
26
 
27
+ import attrs
28
+
27
29
  from airflow.configuration import conf
28
30
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
29
31
  from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
@@ -32,28 +34,34 @@ from airflow.utils.log.logging_mixin import LoggingMixin
32
34
 
33
35
  if TYPE_CHECKING:
34
36
  from airflow.models.taskinstance import TaskInstance
37
+ from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
38
+ from airflow.utils.log.file_task_handler import LogMessages, LogSourceInfo
35
39
 
36
40
 
37
- class S3TaskHandler(FileTaskHandler, LoggingMixin):
38
- """
39
- S3TaskHandler is a python log handler that handles and reads task instance logs.
41
+ @attrs.define
42
+ class S3RemoteLogIO(LoggingMixin): # noqa: D101
43
+ remote_base: str
44
+ base_log_folder: pathlib.Path = attrs.field(converter=pathlib.Path)
45
+ delete_local_copy: bool
40
46
 
41
- It extends airflow FileTaskHandler and uploads to and reads from S3 remote storage.
42
- """
47
+ processors = ()
43
48
 
44
- trigger_should_wrap = True
49
+ def upload(self, path: os.PathLike | str, ti: RuntimeTI):
50
+ """Upload the given log path to the remote storage."""
51
+ path = pathlib.Path(path)
52
+ if path.is_absolute():
53
+ local_loc = path
54
+ remote_loc = os.path.join(self.remote_base, path.relative_to(self.base_log_folder))
55
+ else:
56
+ local_loc = self.base_log_folder.joinpath(path)
57
+ remote_loc = os.path.join(self.remote_base, path)
45
58
 
46
- def __init__(self, base_log_folder: str, s3_log_folder: str, **kwargs):
47
- super().__init__(base_log_folder)
48
- self.handler: logging.FileHandler | None = None
49
- self.remote_base = s3_log_folder
50
- self.log_relative_path = ""
51
- self._hook = None
52
- self.closed = False
53
- self.upload_on_close = True
54
- self.delete_local_copy = kwargs.get(
55
- "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
56
- )
59
+ if local_loc.is_file():
60
+ # read log and remove old logs to get just the latest additions
61
+ log = local_loc.read_text()
62
+ has_uploaded = self.write(log, remote_loc)
63
+ if has_uploaded and self.delete_local_copy:
64
+ shutil.rmtree(os.path.dirname(local_loc))
57
65
 
58
66
  @cached_property
59
67
  def hook(self):
@@ -63,73 +71,6 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
63
71
  transfer_config_args={"use_threads": False},
64
72
  )
65
73
 
66
- def set_context(self, ti: TaskInstance, *, identifier: str | None = None) -> None:
67
- super().set_context(ti, identifier=identifier)
68
- # Local location and remote location is needed to open and
69
- # upload local log file to S3 remote storage.
70
- if TYPE_CHECKING:
71
- assert self.handler is not None
72
-
73
- full_path = self.handler.baseFilename
74
- self.log_relative_path = pathlib.Path(full_path).relative_to(self.local_base).as_posix()
75
- is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
76
- self.upload_on_close = is_trigger_log_context or not getattr(ti, "raw", None)
77
- # Clear the file first so that duplicate data is not uploaded
78
- # when reusing the same path (e.g. with rescheduled sensors)
79
- if self.upload_on_close:
80
- with open(self.handler.baseFilename, "w"):
81
- pass
82
-
83
- def close(self):
84
- """Close and upload local log file to remote storage S3."""
85
- # When application exit, system shuts down all handlers by
86
- # calling close method. Here we check if logger is already
87
- # closed to prevent uploading the log to remote storage multiple
88
- # times when `logging.shutdown` is called.
89
- if self.closed:
90
- return
91
-
92
- super().close()
93
-
94
- if not self.upload_on_close:
95
- return
96
-
97
- local_loc = os.path.join(self.local_base, self.log_relative_path)
98
- remote_loc = os.path.join(self.remote_base, self.log_relative_path)
99
- if os.path.exists(local_loc):
100
- # read log and remove old logs to get just the latest additions
101
- log = pathlib.Path(local_loc).read_text()
102
- write_to_s3 = self.s3_write(log, remote_loc)
103
- if write_to_s3 and self.delete_local_copy:
104
- shutil.rmtree(os.path.dirname(local_loc))
105
-
106
- # Mark closed so we don't double write if close is called twice
107
- self.closed = True
108
-
109
- def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[list[str], list[str]]:
110
- # Explicitly getting log relative path is necessary as the given
111
- # task instance might be different than task instance passed in
112
- # in set_context method.
113
- worker_log_rel_path = self._render_filename(ti, try_number)
114
-
115
- logs = []
116
- messages = []
117
- bucket, prefix = self.hook.parse_s3_url(s3url=os.path.join(self.remote_base, worker_log_rel_path))
118
- keys = self.hook.list_keys(bucket_name=bucket, prefix=prefix)
119
- if keys:
120
- keys = sorted(f"s3://{bucket}/{key}" for key in keys)
121
- if AIRFLOW_V_3_0_PLUS:
122
- messages = keys
123
- else:
124
- messages.append("Found logs in s3:")
125
- messages.extend(f" * {key}" for key in keys)
126
- for key in keys:
127
- logs.append(self.s3_read(key, return_error=True))
128
- else:
129
- if not AIRFLOW_V_3_0_PLUS:
130
- messages.append(f"No logs found on s3 for ti={ti}")
131
- return messages, logs
132
-
133
74
  def s3_log_exists(self, remote_log_location: str) -> bool:
134
75
  """
135
76
  Check if remote_log_location exists in remote storage.
@@ -158,7 +99,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
158
99
  return msg
159
100
  return ""
160
101
 
161
- def s3_write(
102
+ def write(
162
103
  self,
163
104
  log: str,
164
105
  remote_log_location: str,
@@ -168,7 +109,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
168
109
  """
169
110
  Write the log to the remote_log_location; return `True` or fails silently and return `False`.
170
111
 
171
- :param log: the log to write to the remote_log_location
112
+ :param log: the contents to write to the remote_log_location
172
113
  :param remote_log_location: the log's location in remote storage
173
114
  :param append: if False, any existing log file is overwritten. If True,
174
115
  the new log is appended to any existing logs.
@@ -205,3 +146,99 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
205
146
  self.log.exception("Could not write logs to %s", remote_log_location)
206
147
  return False
207
148
  return True
149
+
150
+ def read(self, relative_path: str, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages | None]:
151
+ logs: list[str] = []
152
+ messages = []
153
+ bucket, prefix = self.hook.parse_s3_url(s3url=os.path.join(self.remote_base, relative_path))
154
+ keys = self.hook.list_keys(bucket_name=bucket, prefix=prefix)
155
+ if keys:
156
+ keys = sorted(f"s3://{bucket}/{key}" for key in keys)
157
+ if AIRFLOW_V_3_0_PLUS:
158
+ messages = keys
159
+ else:
160
+ messages.append("Found logs in s3:")
161
+ messages.extend(f" * {key}" for key in keys)
162
+ for key in keys:
163
+ logs.append(self.s3_read(key, return_error=True))
164
+ return messages, logs
165
+ else:
166
+ return messages, None
167
+
168
+
169
+ class S3TaskHandler(FileTaskHandler, LoggingMixin):
170
+ """
171
+ S3TaskHandler is a python log handler that handles and reads task instance logs.
172
+
173
+ It extends airflow FileTaskHandler and uploads to and reads from S3 remote storage.
174
+ """
175
+
176
+ def __init__(self, base_log_folder: str, s3_log_folder: str, **kwargs):
177
+ super().__init__(base_log_folder)
178
+ self.handler: logging.FileHandler | None = None
179
+ self.remote_base = s3_log_folder
180
+ self.log_relative_path = ""
181
+ self._hook = None
182
+ self.closed = False
183
+ self.upload_on_close = True
184
+ self.io = S3RemoteLogIO(
185
+ remote_base=s3_log_folder,
186
+ base_log_folder=base_log_folder,
187
+ delete_local_copy=kwargs.get(
188
+ "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
189
+ ),
190
+ )
191
+
192
+ def set_context(self, ti: TaskInstance, *, identifier: str | None = None) -> None:
193
+ super().set_context(ti, identifier=identifier)
194
+ # Local location and remote location is needed to open and
195
+ # upload local log file to S3 remote storage.
196
+ if TYPE_CHECKING:
197
+ assert self.handler is not None
198
+
199
+ self.ti = ti
200
+
201
+ full_path = self.handler.baseFilename
202
+ self.log_relative_path = pathlib.Path(full_path).relative_to(self.local_base).as_posix()
203
+ is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
204
+ self.upload_on_close = is_trigger_log_context or not getattr(ti, "raw", None)
205
+ # Clear the file first so that duplicate data is not uploaded
206
+ # when reusing the same path (e.g. with rescheduled sensors)
207
+ if self.upload_on_close:
208
+ with open(self.handler.baseFilename, "w"):
209
+ pass
210
+
211
+ def close(self):
212
+ """Close and upload local log file to remote storage S3."""
213
+ # When application exit, system shuts down all handlers by
214
+ # calling close method. Here we check if logger is already
215
+ # closed to prevent uploading the log to remote storage multiple
216
+ # times when `logging.shutdown` is called.
217
+ if self.closed:
218
+ return
219
+
220
+ super().close()
221
+
222
+ if not self.upload_on_close:
223
+ return
224
+
225
+ if hasattr(self, "ti"):
226
+ self.io.upload(self.log_relative_path, self.ti)
227
+
228
+ # Mark closed so we don't double write if close is called twice
229
+ self.closed = True
230
+
231
+ def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[LogSourceInfo, LogMessages]:
232
+ # Explicitly getting log relative path is necessary as the given
233
+ # task instance might be different than task instance passed in
234
+ # in set_context method.
235
+ worker_log_rel_path = self._render_filename(ti, try_number)
236
+
237
+ messages, logs = self.io.read(worker_log_rel_path, ti)
238
+
239
+ if logs is None:
240
+ logs = []
241
+ if not AIRFLOW_V_3_0_PLUS:
242
+ messages.append(f"No logs found on s3 for ti={ti}")
243
+
244
+ return messages, logs
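
Note: S3RemoteLogIO.upload resolves the local file against base_log_folder and maps it onto remote_base before writing. A path-mapping sketch of just that step, with illustrative values and no S3 access involved:

    # Mirrors the path handling in upload() above; paths and bucket are made-up examples.
    from __future__ import annotations

    import os
    import pathlib


    def map_to_remote(
        path: os.PathLike | str, base_log_folder: pathlib.Path, remote_base: str
    ) -> tuple[pathlib.Path, str]:
        """Return (local_location, remote_location) the way S3RemoteLogIO.upload derives them."""
        path = pathlib.Path(path)
        if path.is_absolute():
            local_loc = path
            remote_loc = os.path.join(remote_base, path.relative_to(base_log_folder))
        else:
            local_loc = base_log_folder.joinpath(path)
            remote_loc = os.path.join(remote_base, path)
        return local_loc, remote_loc


    print(
        map_to_remote(
            "dag_id=demo/run_id=manual/task_id=t1/attempt=1.log",
            pathlib.Path("/opt/airflow/logs"),
            "s3://my-bucket/logs",
        )
    )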
@@ -254,7 +254,7 @@ class EC2CreateInstanceOperator(AwsBaseOperator[EC2Hook]):
            region_name=self.region_name,
            api_type="client_type",
        ) """
-        self.hook.terminate_instances(InstanceIds=instance_ids)
+        self.hook.terminate_instances(instance_ids=instance_ids)
         super().on_kill()
 
 
@@ -338,7 +338,7 @@ class EksCreateClusterOperator(BaseOperator):
             fargate_pod_execution_role_arn=self.fargate_pod_execution_role_arn,
             fargate_selectors=self.fargate_selectors,
             create_fargate_profile_kwargs=self.create_fargate_profile_kwargs,
-            subnets=cast(list[str], self.resources_vpc_config.get("subnetIds")),
+            subnets=cast("list[str]", self.resources_vpc_config.get("subnetIds")),
         )
 
     def deferrable_create_cluster_next(self, context: Context, event: dict[str, Any] | None = None) -> None:
@@ -377,7 +377,7 @@
             fargate_pod_execution_role_arn=self.fargate_pod_execution_role_arn,
             fargate_selectors=self.fargate_selectors,
             create_fargate_profile_kwargs=self.create_fargate_profile_kwargs,
-            subnets=cast(list[str], self.resources_vpc_config.get("subnetIds")),
+            subnets=cast("list[str]", self.resources_vpc_config.get("subnetIds")),
         )
         if self.compute == "fargate":
             self.defer(
@@ -503,7 +503,7 @@ class EksCreateNodegroupOperator(BaseOperator):
         nodegroup_subnets_list: list[str] = []
         if self.nodegroup_subnets != "":
             try:
-                nodegroup_subnets_list = cast(list, literal_eval(self.nodegroup_subnets))
+                nodegroup_subnets_list = cast("list", literal_eval(self.nodegroup_subnets))
             except ValueError:
                 self.log.warning(
                     "The nodegroup_subnets should be List or string representing "
@@ -755,11 +755,18 @@ class RedshiftDeleteClusterOperator(BaseOperator):
                     final_cluster_snapshot_identifier=self.final_cluster_snapshot_identifier,
                 )
                 break
-            except self.redshift_hook.get_conn().exceptions.InvalidClusterStateFault:
+            except self.redshift_hook.conn.exceptions.InvalidClusterStateFault:
                 self._attempts -= 1
 
                 if self._attempts:
-                    self.log.error("Unable to delete cluster. %d attempts remaining.", self._attempts)
+                    current_state = self.redshift_hook.conn.describe_clusters(
+                        ClusterIdentifier=self.cluster_identifier
+                    )["Clusters"][0]["ClusterStatus"]
+                    self.log.error(
+                        "Cluster in %s state, unable to delete. %d attempts remaining.",
+                        current_state,
+                        self._attempts,
+                    )
                     time.sleep(self._attempt_interval)
                 else:
                     raise
@@ -785,7 +792,7 @@
             )
 
         elif self.wait_for_completion:
-            waiter = self.redshift_hook.get_conn().get_waiter("cluster_deleted")
+            waiter = self.redshift_hook.conn.get_waiter("cluster_deleted")
             waiter.wait(
                 ClusterIdentifier=self.cluster_identifier,
                 WaiterConfig={"Delay": self.poll_interval, "MaxAttempts": self.max_attempts},
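
Note: RedshiftDeleteClusterOperator now looks up the cluster's current status before logging each retry on InvalidClusterStateFault. A generic sketch of that bounded retry-and-report shape (names here are illustrative, not the provider API):

    # Generic retry-and-report shape; delete_fn/describe_state_fn are hypothetical callables.
    import time


    def delete_with_retries(delete_fn, describe_state_fn, attempts: int = 10, interval: float = 1.0):
        """Call delete_fn, retrying while it reports a transitional state and logging that state."""
        while True:
            try:
                return delete_fn()
            except RuntimeError:  # stand-in for InvalidClusterStateFault
                attempts -= 1
                if not attempts:
                    raise
                print(
                    "Cluster in %s state, unable to delete. %d attempts remaining."
                    % (describe_state_fn(), attempts)
                )
                time.sleep(interval)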
@@ -170,7 +170,7 @@ class SageMakerBaseOperator(BaseOperator):
         timestamp = str(
             time.time_ns() // 1000000000
         )  # only keep the relevant datetime (first 10 digits)
-        name = f"{proposed_name[:max_name_len - len(timestamp) - 1]}-{timestamp}"  # we subtract one to make provision for the dash between the truncated name and timestamp
+        name = f"{proposed_name[: max_name_len - len(timestamp) - 1]}-{timestamp}"  # we subtract one to make provision for the dash between the truncated name and timestamp
         self.log.info("Changed %s name to '%s' to avoid collision.", resource_type, name)
         return name
 
@@ -178,8 +178,7 @@
         """Raise exception if resource type is not 'model' or 'job'."""
         if resource_type not in ("model", "job"):
             raise AirflowException(
-                "Argument resource_type accepts only 'model' and 'job'. "
-                f"Provided value: '{resource_type}'."
+                f"Argument resource_type accepts only 'model' and 'job'. Provided value: '{resource_type}'."
             )
 
     def _check_if_job_exists(self, job_name: str, describe_func: Callable[[str], Any]) -> bool:
@@ -559,8 +558,7 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
             self.operation = "update"
             sagemaker_operation = self.hook.update_endpoint
             self.log.warning(
-                "cannot create already existing endpoint %s, "
-                "updating it with the given config instead",
+                "cannot create already existing endpoint %s, updating it with the given config instead",
                 endpoint_info["EndpointName"],
             )
             if "Tags" in endpoint_info:
@@ -95,5 +95,5 @@ class GlacierJobOperationSensor(AwsBaseSensor[GlacierHook]):
             return False
         else:
             raise AirflowException(
-                f'Sensor failed. Job status: {response["Action"]}, code status: {response["StatusCode"]}'
+                f"Sensor failed. Job status: {response['Action']}, code status: {response['StatusCode']}"
             )
@@ -150,7 +150,8 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
                 external_dag_run_id=self.external_dag_run_id,
                 success_states=self.success_states,
                 failure_states=self.failure_states,
-                waiter_delay=self.poke_interval,
+                # somehow the type of poke_interval is derived as float ??
+                waiter_delay=self.poke_interval,  # type: ignore[arg-type]
                 waiter_max_attempts=self.max_retries,
                 aws_conn_id=self.aws_conn_id,
             ),
@@ -192,7 +192,7 @@ class S3KeySensor(AwsBaseSensor[S3Hook]):
         self.defer(
             timeout=timedelta(seconds=self.timeout),
             trigger=S3KeyTrigger(
-                bucket_name=cast(str, self.bucket_name),
+                bucket_name=cast("str", self.bucket_name),
                 bucket_key=self.bucket_key,
                 wildcard_match=self.wildcard_match,
                 aws_conn_id=self.aws_conn_id,
@@ -81,5 +81,6 @@ class StepFunctionExecutionSensor(AwsBaseSensor[StepFunctionHook]):
             return False
 
         self.log.info("Doing xcom_push of output")
-        self.xcom_push(context, "output", output)
+
+        context["ti"].xcom_push(key="output", value=output)
         return True
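
Note: the sensor now pushes its output through the task instance in context rather than the removed operator-level xcom_push. A shape-only sketch with a stand-in task instance (real code receives Airflow's ti in the execution context):

    # FakeTI is a stand-in used purely to show the call shape.
    class FakeTI:
        def __init__(self):
            self.pushed = {}

        def xcom_push(self, key, value):
            self.pushed[key] = value


    context = {"ti": FakeTI()}
    output = {"status": "SUCCEEDED"}

    # The sensor's new call shape:
    context["ti"].xcom_push(key="output", value=output)

    assert context["ti"].pushed["output"] == output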
@@ -103,7 +103,7 @@ class MongoToS3Operator(BaseOperator):
         if self.is_pipeline:
             results: CommandCursor[Any] | Cursor = MongoHook(self.mongo_conn_id).aggregate(
                 mongo_collection=self.mongo_collection,
-                aggregate_query=cast(list, self.mongo_query),
+                aggregate_query=cast("list", self.mongo_query),
                 mongo_db=self.mongo_db,
                 allowDiskUse=self.allow_disk_use,
             )
@@ -111,7 +111,7 @@
         else:
             results = MongoHook(self.mongo_conn_id).find(
                 mongo_collection=self.mongo_collection,
-                query=cast(dict, self.mongo_query),
+                query=cast("dict", self.mongo_query),
                 projection=self.mongo_projection,
                 mongo_db=self.mongo_db,
                 find_one=False,
@@ -223,7 +223,7 @@ class SqlToS3Operator(BaseOperator):
             return
         for group_label in (grouped_df := df.groupby(**self.groupby_kwargs)).groups:
             yield (
-                cast(str, group_label),
+                cast("str", group_label),
                 grouped_df.get_group(group_label)
                 .drop(random_column_name, axis=1, errors="ignore")
                 .reset_index(drop=True),
@@ -27,7 +27,7 @@ def get_provider_info():
         "name": "Amazon",
         "description": "Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).\n",
         "state": "ready",
-        "source-date-epoch": 1741507721,
+        "source-date-epoch": 1743477760,
         "versions": [
             "9.5.0",
             "9.4.0",
@@ -1410,7 +1410,7 @@ def get_provider_info():
         "devel-dependencies": [
             "aiobotocore>=2.13.0",
             "aws_xray_sdk>=2.12.0",
-            "moto[cloudformation,glue]>=5.0.0",
+            "moto[cloudformation,glue]>=5.1.2",
             "mypy-boto3-appflow>=1.37.0",
             "mypy-boto3-rds>=1.34.90",
             "mypy-boto3-redshift-data>=1.34.0",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.5.0rc2
+Version: 9.5.0rc3
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -1,13 +1,13 @@
1
1
  airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
2
  airflow/providers/amazon/__init__.py,sha256=0Bdu1NsCgEgYJqjhW6Wg0pguEwSl-uixdiXZ8wFdE8E,1493
3
- airflow/providers/amazon/get_provider_info.py,sha256=_iDYbqjwP6HgoGZWSu1EfvVamsLUSghhx2b17aRFNfI,72553
3
+ airflow/providers/amazon/get_provider_info.py,sha256=MMpHbbxpCZ5syMfj84qPtJw_SkXDFjBgnQw6PuuZyN8,72553
4
4
  airflow/providers/amazon/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
5
5
  airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
6
6
  airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
7
7
  airflow/providers/amazon/aws/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
8
8
  airflow/providers/amazon/aws/assets/s3.py,sha256=wNaJiOM90-SCauD4EQneZVXMO54yDRjLPfI8D5o0-fw,1861
9
9
  airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
10
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=V3u5ASC45b6_PkP8fReUVPo2SANI948ShyyYNelVhtk,15082
10
+ airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=FJMhIhoyFAxrwHKCWD-ZfoJK7TV85Geus7GMiYPnZ00,15046
11
11
  airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
12
12
  airflow/providers/amazon/aws/auth_manager/user.py,sha256=zds3U6gHmwAy1MuxFFPtGTYikMj-RjYVki9-TSdfnbg,2043
13
13
  airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -18,7 +18,7 @@ airflow/providers/amazon/aws/auth_manager/cli/__init__.py,sha256=9hdXHABrVpkbpjZ
18
18
  airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py,sha256=RJRwOnsQnYNB1s-AJuCpfEiKNZapGYPlE7PBLp_DLdA,5477
19
19
  airflow/providers/amazon/aws/auth_manager/cli/definition.py,sha256=5Uc5IuEwKnVPtrqoh_hVOPkd2iel002TdJRnchgivXo,2414
20
20
  airflow/providers/amazon/aws/auth_manager/router/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
21
- airflow/providers/amazon/aws/auth_manager/router/login.py,sha256=ig3XjgpVX2dY6l4PWuUQmMEjqIYIiBDKUkhQ5kA14CQ,5222
21
+ airflow/providers/amazon/aws/auth_manager/router/login.py,sha256=wco9S4OTdMD6EKBN3-rdkgUMBuLcU_C84oJ97OYxhQI,5273
22
22
  airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
23
23
  airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
24
24
  airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=TPSNZJ6E3zqN7mvdrMrarqwHeFYN9Efd2jD3hpN7tr0,970
@@ -28,7 +28,7 @@ airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNV
28
28
  airflow/providers/amazon/aws/executors/batch/utils.py,sha256=QXaKyrUMCYr_Oz7Hq5b7A-gppP61fQtaOX7wip1J7ho,5274
29
29
  airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
30
30
  airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=c_2BJu6pC9xjRuPfufqSMYPZVDAbma0JO71JKSBRMSg,3760
31
- airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=5N2PwqXrg3YfezJtjQDJbcc94PGxk61ohYTNuuFOn-k,25029
31
+ airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=Pl_Lj6RBSadEZPiuvIysy4HSJ3oralVHSCaef_xbUm8,25029
32
32
  airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=pJwaSJLzsflJxlB-5pveRSzXS1O8ua9Tq7_P63RrQ9I,6003
33
33
  airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=wI5dVjQsqgCxq0zOnpXX1ODDG8UW52RTSfSJO3Szaqc,9761
34
34
  airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -39,17 +39,17 @@ airflow/providers/amazon/aws/fs/s3.py,sha256=dHUcbZ_uJzy0v8G5lDtUHJ-905tXdU9wRwl
39
39
  airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
40
40
  airflow/providers/amazon/aws/hooks/appflow.py,sha256=-le6RsIMWIqTav7KGknsph9Td42znSm_eIYztxc_RsE,5263
41
41
  airflow/providers/amazon/aws/hooks/athena.py,sha256=Ad_HoITQCQdoF0EzG3U0Pl4T8nhyKmesHov74ZcMIyo,13465
42
- airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=vFIUbMMTem3xvYAUTvW3h1ypjpKVLNck3VbrAlupVLA,6844
42
+ airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=q0LJE4RvBgHGtTx_YkxcPZ8VFoLYDsHQnWJEbVOwEus,6844
43
43
  airflow/providers/amazon/aws/hooks/base_aws.py,sha256=ZQ2yNDWv0SEpB_6oFlyCPFU2k6Y_EzfGBwpBUUrDErE,45504
44
- airflow/providers/amazon/aws/hooks/batch_client.py,sha256=uiYog6GsEyr958OJV-xY51gxcSqpD_MELhVLpjxHTaM,21636
44
+ airflow/providers/amazon/aws/hooks/batch_client.py,sha256=aenPmoiqQcaj0IWL_1RMAbBw5Ku5mon-EwJC-3vpZ48,21616
45
45
  airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
46
- airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=VlAf3NYyGsfmOw9S4Ods8VKua3mBWSLHEAr8hHCHdmY,10579
46
+ airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=93NNCtiTWCOwNRnVJudyuzrZ1EJWp2TRpV5drQjqvMQ,10954
47
47
  airflow/providers/amazon/aws/hooks/bedrock.py,sha256=TZHEBOcDyeGSx-QLL8LydiytHEMdUETji_4emJfv4Ng,3343
48
48
  airflow/providers/amazon/aws/hooks/chime.py,sha256=XYI3YM5zQP7ogr13WVBKk84762TwJmujFTr4_5ppos8,4406
49
49
  airflow/providers/amazon/aws/hooks/cloud_formation.py,sha256=7UVGmlmrT8q8rPcEfgPURZ_n-Sd1qNVHGERAi39iqIs,3436
50
50
  airflow/providers/amazon/aws/hooks/comprehend.py,sha256=Xggr7GCReowgTAVWNXboFSGmT4r5YpMBauZVZfUWIzg,2734
51
51
  airflow/providers/amazon/aws/hooks/datasync.py,sha256=WhN1YHW0oigOpgwQqvEvzQMqW1gbRfL8-iXWZzH_bEw,13620
52
- airflow/providers/amazon/aws/hooks/dms.py,sha256=Ki7Ol-BbPyW4q35sJlyycuWRL8gBrPhuhvYgX_AwF94,14384
52
+ airflow/providers/amazon/aws/hooks/dms.py,sha256=5-V65iz8iOONb5TCLpQ5Rkge1TXBsj0BHdAH6qGo6ag,14420
53
53
  airflow/providers/amazon/aws/hooks/dynamodb.py,sha256=GpOCcNAzCsyiWXNHpmumd5XmLgUvU3GFZncKc7P-ePk,4019
54
54
  airflow/providers/amazon/aws/hooks/ec2.py,sha256=zFNv0gRkhC1zimyr9sSo6lGZoXz2chC2p7AA9p8lgHI,8100
55
55
  airflow/providers/amazon/aws/hooks/ecr.py,sha256=vTPUVCEz1x2DHA6-N0bbOcRXxP8JblXq_r0II5wtDyE,4015
@@ -72,11 +72,11 @@ airflow/providers/amazon/aws/hooks/neptune.py,sha256=a3r26msR8U5oCTMHQYqA-2OspVO
72
72
  airflow/providers/amazon/aws/hooks/opensearch_serverless.py,sha256=0zFRXXjlbQRCTt5D_q1FCp965FC8LyOhMRk2x6nvsIc,1543
73
73
  airflow/providers/amazon/aws/hooks/quicksight.py,sha256=2Am_K-BcoqcfuWwLbWjW1LsbZpGskK2bV-uHT2diu1o,7347
74
74
  airflow/providers/amazon/aws/hooks/rds.py,sha256=h7NF3GZ42RKeh70rlg2BQFVpa8vNadS37slj0MsAT3w,15211
75
- airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=5hb3XU4f4QmbBe-RjnPVGbebCzgjIFnVXqdE-s7eL3M,8016
76
- airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=DSa8iAkSWbPFnMEKZZQ_J_PPowVw97j5QHoVIlwJwqM,11863
75
+ airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=hyX_ldjn_gNOehcPV-3M3wzO1HdIdujG8JjrUZmCnNg,7962
76
+ airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=JBNmWA4QfRJeJ8WJAg8K6yzFmgg6aYWP_XaDzsJZPe4,11843
77
77
  airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=qwUHb0kxc_PUTfulCx_uWlKesCMpgAaTQ-3k80NYDfU,11178
78
78
  airflow/providers/amazon/aws/hooks/s3.py,sha256=SOeVux_NlGwUU7iKt8GWlVDumqJNyBbHiw4QQzpESW8,61891
79
- airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=tSVuNHbwMLI_bpbnWV_2p6-XAtqcfl4M0TitQzCKhNk,60500
79
+ airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=zaMPwCSkfaFbIO19-u3vSLVpAvw7g7eKUn81oUhWuIc,60502
80
80
  airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py,sha256=a8nuZFp7CNjgKgGP3IsWpPGnkHEjAWCyNE55WvxmNEw,8002
81
81
  airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
82
82
  airflow/providers/amazon/aws/hooks/ses.py,sha256=DuCJwFhtg3D3mu0RSjRrebyUpwBAhrWSr-kdu8VR9qU,4174
@@ -87,7 +87,7 @@ airflow/providers/amazon/aws/hooks/step_function.py,sha256=TSmPPF-CFR76a-K9f2yGt
87
87
  airflow/providers/amazon/aws/hooks/sts.py,sha256=6KYyou-tOhbGhRfnGHH95TUi3ENNHkISUJf0nskmuiw,1827
88
88
  airflow/providers/amazon/aws/hooks/verified_permissions.py,sha256=-5vLcpBX_V43tY37a5PpeC60DIUAa2AXBe3sSxpqlGY,1799
89
89
  airflow/providers/amazon/aws/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
90
- airflow/providers/amazon/aws/links/athena.py,sha256=bf__mn0W1H0x1EVJBFevf-ap1GDpuiVDuhw4mnEkr9k,1235
90
+ airflow/providers/amazon/aws/links/athena.py,sha256=3fEukB9R59pm_lXKHMTCVG4dPaOmllJ-v-Ri3KDGlc0,1224
91
91
  airflow/providers/amazon/aws/links/base_aws.py,sha256=bYhgMtBxGRq0QwaaVndceCOdxCJep56X1FVb7QuXQLw,3372
92
92
  airflow/providers/amazon/aws/links/batch.py,sha256=-bnVCTEjgXrLOxvdz2mwmG0wIVAIzlaqvKMq0dJyxqM,1770
93
93
  airflow/providers/amazon/aws/links/comprehend.py,sha256=59PtBlRkzXE4B2jbGZ3ip-WEFRStlMkXyWNPFxnTu-M,1636
@@ -100,8 +100,8 @@ airflow/providers/amazon/aws/links/sagemaker.py,sha256=RTQubIIpmjTWEGrJiRI2MyF4C
100
100
  airflow/providers/amazon/aws/links/sagemaker_unified_studio.py,sha256=pHbO14OmkqqjrjnZpt2tO3LISdBbitd9E00DV3ucfTI,1202
101
101
  airflow/providers/amazon/aws/links/step_function.py,sha256=xSL4vfKLnCn-QboRtruajpH5elRrNfw0XkY7eSfPpE4,2099
102
102
  airflow/providers/amazon/aws/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
103
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=11iFc08HE6bvLWsGCTi0RoiYsSziEXWFEjvCwDJ4liw,6806
104
- airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=oBBmgQrrpE52EBVxDthon1sDk7_vWbS0BjJ0R1_n3Gc,8501
103
+ airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=3Fr-brr4KAHUsXvuQog8OiKRH-FBs1MAT24AF6pDaqI,11184
104
+ airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=olDxljlgMuEux_4Mr8WQSQKcMcV_OmURm0hWDDDn5Bc,9666
105
105
  airflow/providers/amazon/aws/notifications/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
106
106
  airflow/providers/amazon/aws/notifications/chime.py,sha256=bpaQtR5IG4ZlBmTdlfrHOZQo4WSQYs_HRm4DWDCkFWE,2127
107
107
  airflow/providers/amazon/aws/notifications/sns.py,sha256=XracHC3r3BxzUuv-DzFLy6l7K6R_Ps85oJIUS0-Lkt4,3116
@@ -116,9 +116,9 @@ airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=8apqdOR9KpAvtep
116
116
  airflow/providers/amazon/aws/operators/comprehend.py,sha256=5eQEJSoOQKcwVPa8Qmp7_VX4-uBsBLziJi35iAg029U,17695
117
117
  airflow/providers/amazon/aws/operators/datasync.py,sha256=mpq3-JcF7tqEm3BpwnEd3qemvTVVyWyq40DUaHrpne8,20321
118
118
  airflow/providers/amazon/aws/operators/dms.py,sha256=20xp5mn5AM-aNPIQZdsMU6x-16so8o6vKR-JKS6co3o,34746
119
- airflow/providers/amazon/aws/operators/ec2.py,sha256=mq16s2CJaPpnYWhjNhRK-uAnIYYqy6eeJBB8JOryP-M,19699
119
+ airflow/providers/amazon/aws/operators/ec2.py,sha256=SclBzOLo3GbQe3kw4S3MKf8zLm8IaKNSiGTc_U-OxRo,19700
120
120
  airflow/providers/amazon/aws/operators/ecs.py,sha256=wsIpsPkacYIwE-ZUfgZ0fHOso7-RkUpYecxIEOC-Fxw,33277
121
- airflow/providers/amazon/aws/operators/eks.py,sha256=0NuCVQZNX3D7Ooco_vLGACniJGld-aueUloyCjUsTuc,49218
121
+ airflow/providers/amazon/aws/operators/eks.py,sha256=RzUiiPjyTbV50nfTth4JViglBxMNzqEjuw0PN64AXPA,49224
122
122
  airflow/providers/amazon/aws/operators/emr.py,sha256=MtfAOg2WqCiJskkClPkHBBDWIE-3h4FVjOej7t6x3CE,73205
123
123
  airflow/providers/amazon/aws/operators/eventbridge.py,sha256=NacTdvRzZZFizSzC3rb0Z7g8dHQWkKQEXGYzFKOp3fc,10421
124
124
  airflow/providers/amazon/aws/operators/glacier.py,sha256=6TFC07B0EOmtRxLs7Bok4jwV84po2yVDa-DnlbnAOVg,3681
@@ -131,10 +131,10 @@ airflow/providers/amazon/aws/operators/mwaa.py,sha256=zVJREj9n5kSeY6GkltEAMzILZ5
131
131
  airflow/providers/amazon/aws/operators/neptune.py,sha256=3OzRFinRJahs1G3GZfez9cifYvmIEa5hYVDNzRGApxk,14778
132
132
  airflow/providers/amazon/aws/operators/quicksight.py,sha256=LNZRW8N4yIXLUgVb0vmJekjG1NFS70yGyeKtinNATMk,4116
133
133
  airflow/providers/amazon/aws/operators/rds.py,sha256=ziuipADQzY7JyDHR5-O8YfATyw1rYD-65iezJoPDji4,38583
134
- airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=h02aYZ-XxYJIGScYLCsETu6g3BzQQcHhZyOF3MEQ9to,36858
134
+ airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=dXakMyZV5vvEh0-20FOolMU5xEDugnMrvwwbLvuYc3o,37168
135
135
  airflow/providers/amazon/aws/operators/redshift_data.py,sha256=atZty5-rEk-GyhTqyQlwGnW9LznPQ65y_IAYCt3Kyuo,10872
136
136
  airflow/providers/amazon/aws/operators/s3.py,sha256=0lhbwRF4RQgKUag6w6Q_kKLeZIrsl8A1p9IOsOxtgoQ,38493
137
- airflow/providers/amazon/aws/operators/sagemaker.py,sha256=Gj04hUbF_gZLKf5wTtqSG64V4sxqQVNJnazYQa_mw8E,83772
137
+ airflow/providers/amazon/aws/operators/sagemaker.py,sha256=uC3t4UIoGMbD0owdqYkxEetdxJ2lyf1o7CqkiaGAN6c,83731
138
138
  airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py,sha256=J-huObn3pZ_fg0gEy-BLsX298CX_n7qWV2YwjfpFnrw,6867
139
139
  airflow/providers/amazon/aws/operators/sns.py,sha256=uVcSJBbqy7YCOeiCrMvFFn9F9xTzMRpfrEygqEIhWEM,3757
140
140
  airflow/providers/amazon/aws/operators/sqs.py,sha256=o9rH2Pm5DNmccLh5I2wr96hZiuxOPi6YGZ2QluOeVb0,4764
@@ -155,22 +155,22 @@ airflow/providers/amazon/aws/sensors/ec2.py,sha256=wsCf7g1vvIgcVGzj17mgd_4XIttYF
155
155
  airflow/providers/amazon/aws/sensors/ecs.py,sha256=0O7N9x5SJNYHdcuQP1QnNJWdZ_-cX7j0ntwnLcFztE4,6863
156
156
  airflow/providers/amazon/aws/sensors/eks.py,sha256=beq1AcFgLwI-DfBsvFTbDExrM58cbL-ibjB_8DW-UG8,9658
157
157
  airflow/providers/amazon/aws/sensors/emr.py,sha256=4F9x2eRFdlQ3Whucy_3Gjyx_q1Hdfj0yDVnAJgr7kG8,24752
158
- airflow/providers/amazon/aws/sensors/glacier.py,sha256=yJBWQzY9QwjeCJGfnSd4KcaInXUQPf3nugreAQwmIYg,4078
158
+ airflow/providers/amazon/aws/sensors/glacier.py,sha256=yChjaXawHPGytKp84PvAsdplLGTeE6hLrLgLILCxlNU,4078
159
159
  airflow/providers/amazon/aws/sensors/glue.py,sha256=pyx6oaEzB9755vMOeHRYIQ2hWR_7IAFDZ_WOTidu8ww,14267
160
160
  airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=YXvkQRnu5TnM2E8Tfi5B_0STqRUvRE9pgdoluz8oNCw,5831
161
161
  airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=KWU2mXCCqcaVNXL8ZmDJRScLt9u-pQLQubA2fm_75Ls,3415
162
162
  airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=TDsQKi5nx10CgMoTSVbYRo4m-PiKFDhyhnO7dQZSnuI,9933
163
163
  airflow/providers/amazon/aws/sensors/lambda_function.py,sha256=kO4UyrEMaMYSYbQyBN3F2eoysze_kIYCbMaF4tqiKo0,3287
164
- airflow/providers/amazon/aws/sensors/mwaa.py,sha256=2yO4Rz1KmydqsacPNOBaSDKkS5oIcsi7Nw5P4HCtGpk,7369
164
+ airflow/providers/amazon/aws/sensors/mwaa.py,sha256=FK6qPOkV6fZKt4-3ayUyr9O8Q-RZVUphL5a3wIcVHVw,7474
165
165
  airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=cSaZvCvAC7zhFqBYNympTiQHtgCZ7srC5-TrbS4l2GQ,5508
166
166
  airflow/providers/amazon/aws/sensors/quicksight.py,sha256=lm1omzh01BKh0KHU3g2I1yH9LAXtddUDiuIS3uIeOrE,3575
167
167
  airflow/providers/amazon/aws/sensors/rds.py,sha256=QUIBZYaWv4Sjt3xLp5scF9uhKDjNnhxLDKewAuCjUmo,6901
168
168
  airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=K_z_nTuYiECuR8vxPD4EcHhb8-d6dWqUSz6q1Zuy9wc,4088
169
- airflow/providers/amazon/aws/sensors/s3.py,sha256=ElmwY-NHvqWWg4D20V5LKnrlR1JPll78tJoYRo7OUt4,17328
169
+ airflow/providers/amazon/aws/sensors/s3.py,sha256=LbGXzxycC16QXVmQYeEag3X8_0JgMSnrKbN76BJjhe0,17330
170
170
  airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=nR32E1qKl9X61W52fC5FVB6ZQKb4gZVgxoMDimvXYhQ,13661
171
171
  airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py,sha256=9XHmIBjPtYb9QDJuZVOFrDwY1DKYgwW6M7ThEb9YdnI,2893
172
172
  airflow/providers/amazon/aws/sensors/sqs.py,sha256=gsHs_cJdK2E_Bd8TN34GC6W4SlHJQTLv72gz_n4uf4o,10690
173
- airflow/providers/amazon/aws/sensors/step_function.py,sha256=uauhkuYOUPP7PyLF7HwLiKZ5qpSxeC-36ekycp4AiHc,3596
173
+ airflow/providers/amazon/aws/sensors/step_function.py,sha256=gaklKHdfmE-9avKSmyuGYvv9CuSklpjPz4KXZI8wXnY,3607
174
174
  airflow/providers/amazon/aws/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
175
175
  airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py,sha256=4aArk3bVZorDrxp5TXew_zVB4TD3avtGKfjta61Mi40,7248
176
176
  airflow/providers/amazon/aws/transfers/base.py,sha256=hA72zMl4E_rXhmQZ1MwhC8AhM5rfmeHqNEOUNZrNDpk,2415
@@ -184,7 +184,7 @@ airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py,sha256=zG9Af5Ztema452
184
184
  airflow/providers/amazon/aws/transfers/http_to_s3.py,sha256=J1HjIdGsd4Zl8kk-RJAX6uQPXTotmjLVKed4JoSSPWo,7538
185
185
  airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py,sha256=LAZKq20An7A1m-J_5rqNOaoPcg8xzD3Mkc63oOPG1bk,4563
186
186
  airflow/providers/amazon/aws/transfers/local_to_s3.py,sha256=FHMOACqjPeHRV85vAQgSZ6ncSPPj_mML9CyE7O3PIfk,4192
187
- airflow/providers/amazon/aws/transfers/mongo_to_s3.py,sha256=VgRUftpTdHGUQjJWDWU8d8lUKUq1dtHL72D9L8bAcVA,6047
187
+ airflow/providers/amazon/aws/transfers/mongo_to_s3.py,sha256=09wAN5426QmazvA0kB-HkwJ2d91faQOoe9Hcn1Y1dP8,6051
188
188
  airflow/providers/amazon/aws/transfers/redshift_to_s3.py,sha256=rixW1f1rVW4tLOjkqJiJap45eZQtL0ftw2M4kpGewCA,13747
189
189
  airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py,sha256=XYwswE5-CT8zwkJPVUBJ87McePsDDZAUmeVGNG2CsmU,11681
190
190
  airflow/providers/amazon/aws/transfers/s3_to_ftp.py,sha256=_G-aRJ4JgQUNXUGmfAXeZ2xfAH64vtDISLjTPpWOTGA,2993
@@ -193,7 +193,7 @@ airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=gYT0iG9pbL36ObnjzKm_
193
193
  airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=shkNpAoNgKJ3fBEBg7ZQaU8zMa5WjZuZV4eihxh2uLQ,4976
194
194
  airflow/providers/amazon/aws/transfers/salesforce_to_s3.py,sha256=WbCZUa9gfQB1SjDfUfPw5QO8lZ8Q-vSLriTnpXLhvxs,5713
195
195
  airflow/providers/amazon/aws/transfers/sftp_to_s3.py,sha256=-D5AR306Q8710e4dHao75CMGS7InHernCH_aZsE6Je4,4209
196
- airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=vQJ-s9RYMiZQ_qAV87NVTK6mrxtwbrsfKq0sCYSDnfQ,10618
196
+ airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=Wq0mqKyCdk7fDAFFV76tQnBzJTQbKAMTy9jP9O8qn38,10620
197
197
  airflow/providers/amazon/aws/triggers/README.md,sha256=ax2F0w2CuQSDN4ghJADozrrv5W4OeCDPA8Vzp00BXOU,10919
198
198
  airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
199
199
  airflow/providers/amazon/aws/triggers/athena.py,sha256=62ty40zejcm5Y0d1rTQZuYzSjq3hUkmAs0d_zxM_Kjw,2596
@@ -265,7 +265,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
265
265
  airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
266
266
  airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
267
267
  airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
268
- apache_airflow_providers_amazon-9.5.0rc2.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
269
- apache_airflow_providers_amazon-9.5.0rc2.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
270
- apache_airflow_providers_amazon-9.5.0rc2.dist-info/METADATA,sha256=knQ6EiNVlXIPWpxg8D0rK0FFqwQkV0Epn7qXL4r7hik,9802
271
- apache_airflow_providers_amazon-9.5.0rc2.dist-info/RECORD,,
268
+ apache_airflow_providers_amazon-9.5.0rc3.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
269
+ apache_airflow_providers_amazon-9.5.0rc3.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
270
+ apache_airflow_providers_amazon-9.5.0rc3.dist-info/METADATA,sha256=AgsnEH525W9ia-ldkgPRt1_KRBwrZ2VBOlBFwKJOhK8,9802
271
+ apache_airflow_providers_amazon-9.5.0rc3.dist-info/RECORD,,