apache-airflow-providers-amazon 9.10.0rc1__py3-none-any.whl → 9.11.0rc1__py3-none-any.whl

This diff shows the changes between two publicly available package versions as they were released to their public registry. It is provided for informational purposes only.
Files changed (29)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +37 -6
  3. airflow/providers/amazon/aws/hooks/base_aws.py +5 -5
  4. airflow/providers/amazon/aws/hooks/ec2.py +1 -2
  5. airflow/providers/amazon/aws/hooks/ecs.py +1 -11
  6. airflow/providers/amazon/aws/hooks/eks.py +2 -2
  7. airflow/providers/amazon/aws/hooks/redshift_cluster.py +4 -4
  8. airflow/providers/amazon/aws/hooks/redshift_sql.py +1 -1
  9. airflow/providers/amazon/aws/hooks/s3.py +8 -9
  10. airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +3 -13
  11. airflow/providers/amazon/aws/notifications/chime.py +1 -1
  12. airflow/providers/amazon/aws/operators/appflow.py +2 -2
  13. airflow/providers/amazon/aws/operators/athena.py +1 -1
  14. airflow/providers/amazon/aws/operators/datasync.py +2 -2
  15. airflow/providers/amazon/aws/operators/ecs.py +1 -8
  16. airflow/providers/amazon/aws/operators/emr.py +12 -10
  17. airflow/providers/amazon/aws/operators/sagemaker.py +2 -3
  18. airflow/providers/amazon/aws/operators/ssm.py +131 -0
  19. airflow/providers/amazon/aws/sensors/ssm.py +127 -0
  20. airflow/providers/amazon/aws/transfers/sql_to_s3.py +10 -3
  21. airflow/providers/amazon/aws/triggers/opensearch_serverless.py +1 -1
  22. airflow/providers/amazon/aws/triggers/ssm.py +86 -0
  23. airflow/providers/amazon/aws/utils/suppress.py +1 -3
  24. airflow/providers/amazon/get_provider_info.py +13 -0
  25. airflow/providers/amazon/version_compat.py +1 -1
  26. {apache_airflow_providers_amazon-9.10.0rc1.dist-info → apache_airflow_providers_amazon-9.11.0rc1.dist-info}/METADATA +13 -11
  27. {apache_airflow_providers_amazon-9.10.0rc1.dist-info → apache_airflow_providers_amazon-9.11.0rc1.dist-info}/RECORD +29 -26
  28. {apache_airflow_providers_amazon-9.10.0rc1.dist-info → apache_airflow_providers_amazon-9.11.0rc1.dist-info}/WHEEL +0 -0
  29. {apache_airflow_providers_amazon-9.10.0rc1.dist-info → apache_airflow_providers_amazon-9.11.0rc1.dist-info}/entry_points.txt +0 -0
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "9.10.0"
32
+ __version__ = "9.11.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -362,26 +362,57 @@ class AwsLambdaExecutor(BaseExecutor):
362
362
  MaxNumberOfMessages=10,
363
363
  )
364
364
 
365
+ # Pagination? Maybe we don't need it. But we don't always delete messages after viewing them so we
366
+ # could possibly accumulate a lot of messages in the queue and get stuck if we don't read bigger
367
+ # chunks and paginate.
365
368
  messages = response.get("Messages", [])
366
- # Pagination? Maybe we don't need it. Since we always delete messages after looking at them.
367
- # But then that may delete messages that could have been adopted. Let's leave it for now and see how it goes.
369
+ # The keys that we validate in the messages below will be different depending on whether or not
370
+ # the message is from the dead letter queue or the main results queue.
371
+ message_keys = ("return_code", "task_key")
368
372
  if messages and queue_url == self.dlq_url:
369
373
  self.log.warning("%d messages received from the dead letter queue", len(messages))
374
+ message_keys = ("command", "task_key")
370
375
 
371
376
  for message in messages:
377
+ delete_message = False
372
378
  receipt_handle = message["ReceiptHandle"]
373
- body = json.loads(message["Body"])
379
+ try:
380
+ body = json.loads(message["Body"])
381
+ except json.JSONDecodeError:
382
+ self.log.warning(
383
+ "Received a message from the queue that could not be parsed as JSON: %s",
384
+ message["Body"],
385
+ )
386
+ delete_message = True
387
+ # If the message is not already marked for deletion, check if it has the required keys.
388
+ if not delete_message and not all(key in body for key in message_keys):
389
+ self.log.warning(
390
+ "Message is not formatted correctly, %s and/or %s are missing: %s", *message_keys, body
391
+ )
392
+ delete_message = True
393
+ if delete_message:
394
+ self.log.warning("Deleting the message to avoid processing it again.")
395
+ self.sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=receipt_handle)
396
+ continue
374
397
  return_code = body.get("return_code")
375
398
  ser_task_key = body.get("task_key")
376
399
  # Fetch the real task key from the running_tasks dict, using the serialized task key.
377
400
  try:
378
401
  task_key = self.running_tasks[ser_task_key]
379
402
  except KeyError:
380
- self.log.warning(
381
- "Received task %s from the queue which is not found in running tasks. Removing message.",
403
+ self.log.debug(
404
+ "Received task %s from the queue which is not found in running tasks, it is likely "
405
+ "from another Lambda Executor sharing this queue or might be a stale message that needs "
406
+ "deleting manually. Marking the message as visible again.",
382
407
  ser_task_key,
383
408
  )
384
- task_key = None
409
+ # Mark task as visible again in SQS so that another executor can pick it up.
410
+ self.sqs_client.change_message_visibility(
411
+ QueueUrl=queue_url,
412
+ ReceiptHandle=receipt_handle,
413
+ VisibilityTimeout=0,
414
+ )
415
+ continue
385
416
 
386
417
  if task_key:
387
418
  if return_code == 0:
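
The hunk above tightens how the Lambda executor handles SQS result messages: bodies that are not valid JSON or that lack the expected keys are deleted, and messages for tasks this executor does not own are made visible again instead of being dropped. A minimal sketch of the validation step, using a placeholder message body:

```python
import json

# Hypothetical results-queue message body; the executor expects both
# "return_code" and "task_key" ("command"/"task_key" for the dead letter queue).
raw_body = '{"return_code": 0, "task_key": "example-task-key"}'
message_keys = ("return_code", "task_key")

try:
    body = json.loads(raw_body)
except json.JSONDecodeError:
    body = None  # malformed bodies are deleted to avoid reprocessing

is_valid = body is not None and all(key in body for key in message_keys)
print(is_valid)  # True
```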
@@ -74,7 +74,7 @@ from airflow.utils.log.logging_mixin import LoggingMixin
74
74
  # If we change to Union syntax then mypy is not happy with UP007 Use `X | Y` for type annotations
75
75
  # The only way to workaround it for now is to keep the union syntax with ignore for mypy
76
76
  # We should try to resolve this later.
77
- BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, ServiceResource]) # type: ignore[operator] # noqa: UP007
77
+ BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, ServiceResource]) # noqa: UP007
78
78
 
79
79
 
80
80
  if AIRFLOW_V_3_0_PLUS:
@@ -636,7 +636,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
636
636
  raise
637
637
 
638
638
  return AwsConnectionWrapper(
639
- conn=connection, # type: ignore[arg-type]
639
+ conn=connection,
640
640
  region_name=self._region_name,
641
641
  botocore_config=self._config,
642
642
  verify=self._verify,
@@ -718,10 +718,10 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
718
718
  # because the user_agent_extra field is generated at runtime.
719
719
  user_agent_config = Config(
720
720
  user_agent_extra=self._generate_user_agent_extra_field(
721
- existing_user_agent_extra=config.user_agent_extra # type: ignore[union-attr]
721
+ existing_user_agent_extra=config.user_agent_extra
722
722
  )
723
723
  )
724
- return config.merge(user_agent_config) # type: ignore[union-attr]
724
+ return config.merge(user_agent_config)
725
725
 
726
726
  def get_client_type(
727
727
  self,
@@ -1050,7 +1050,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
1050
1050
  return WaiterModel(model_config).waiter_names
1051
1051
 
1052
1052
 
1053
- class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]): # type: ignore[operator] # noqa: UP007
1053
+ class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]): # noqa: UP007
1054
1054
  """
1055
1055
  Base class for interact with AWS.
1056
1056
 
@@ -20,11 +20,10 @@ from __future__ import annotations
20
20
  import functools
21
21
  import time
22
22
  from collections.abc import Callable
23
- from typing import TypeVar
23
+ from typing import ParamSpec, TypeVar
24
24
 
25
25
  from airflow.exceptions import AirflowException
26
26
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
27
- from airflow.typing_compat import ParamSpec
28
27
 
29
28
  PS = ParamSpec("PS")
30
29
  RT = TypeVar("RT")
@@ -19,7 +19,7 @@ from __future__ import annotations
19
19
 
20
20
  from typing import TYPE_CHECKING, Protocol, runtime_checkable
21
21
 
22
- from airflow.providers.amazon.aws.exceptions import EcsOperatorError, EcsTaskFailToStart
22
+ from airflow.providers.amazon.aws.exceptions import EcsOperatorError
23
23
  from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
24
24
  from airflow.providers.amazon.aws.utils import _StringCompareEnum
25
25
 
@@ -38,16 +38,6 @@ def should_retry(exception: Exception):
38
38
  return False
39
39
 
40
40
 
41
- def should_retry_eni(exception: Exception):
42
- """Check if exception is related to ENI (Elastic Network Interfaces)."""
43
- if isinstance(exception, EcsTaskFailToStart):
44
- return any(
45
- eni_reason in exception.message
46
- for eni_reason in ["network interface provisioning", "ResourceInitializationError"]
47
- )
48
- return False
49
-
50
-
51
41
  class EcsClusterStates(_StringCompareEnum):
52
42
  """Contains the possible State values of an ECS Cluster."""
53
43
 
@@ -100,7 +100,7 @@ COMMAND = """
100
100
 
101
101
  json_string=$(printf '{{"kind": "ExecCredential","apiVersion": \
102
102
  "client.authentication.k8s.io/v1alpha1","spec": {{}},"status": \
103
- {{"expirationTimestamp": "%s","token": "%s"}}}}' "$expiration_timestamp" "$token")
103
+ {{"expirationTimestamp": "%s","token": "%s"}}}}' "$timestamp" "$token")
104
104
  echo $json_string
105
105
  """
106
106
 
@@ -523,7 +523,7 @@ class EksHook(AwsBaseHook):
523
523
  :return: A List of the combined results of the provided API call.
524
524
  """
525
525
  name_collection: list = []
526
- token = DEFAULT_PAGINATION_TOKEN
526
+ token: str | None = DEFAULT_PAGINATION_TOKEN
527
527
 
528
528
  while token is not None:
529
529
  response = api_call(nextToken=token)
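
The pagination loop above now types the token as `str | None` so the loop ends cleanly once the API stops returning a `nextToken`. A minimal sketch of the same pattern against a hypothetical paginated API call (the empty-string starting token and the `items` response key are assumptions for illustration):

```python
def collect_all(api_call) -> list:
    # Accumulate results from a paginated API until no nextToken is returned.
    names: list = []
    token: str | None = ""  # assumed starting token for the first call
    while token is not None:
        response = api_call(nextToken=token)
        names.extend(response.get("items", []))
        token = response.get("nextToken")  # None ends the loop
    return names
```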
@@ -77,7 +77,7 @@ class RedshiftHook(AwsBaseHook):
77
77
  return response
78
78
 
79
79
  # TODO: Wrap create_cluster_snapshot
80
- def cluster_status(self, cluster_identifier: str) -> str:
80
+ def cluster_status(self, cluster_identifier: str) -> str | None:
81
81
  """
82
82
  Get status of a cluster.
83
83
 
@@ -92,7 +92,7 @@ class RedshiftHook(AwsBaseHook):
92
92
  except self.conn.exceptions.ClusterNotFoundFault:
93
93
  return "cluster_not_found"
94
94
 
95
- async def cluster_status_async(self, cluster_identifier: str) -> str:
95
+ async def cluster_status_async(self, cluster_identifier: str) -> str | None:
96
96
  async with await self.get_async_conn() as client:
97
97
  response = await client.describe_clusters(ClusterIdentifier=cluster_identifier)
98
98
  return response["Clusters"][0]["ClusterStatus"] if response else None
@@ -139,7 +139,7 @@ class RedshiftHook(AwsBaseHook):
139
139
  snapshots.sort(key=lambda x: x["SnapshotCreateTime"], reverse=True)
140
140
  return snapshots
141
141
 
142
- def restore_from_cluster_snapshot(self, cluster_identifier: str, snapshot_identifier: str) -> str:
142
+ def restore_from_cluster_snapshot(self, cluster_identifier: str, snapshot_identifier: str) -> dict | None:
143
143
  """
144
144
  Restore a cluster from its snapshot.
145
145
 
@@ -160,7 +160,7 @@ class RedshiftHook(AwsBaseHook):
160
160
  cluster_identifier: str,
161
161
  retention_period: int = -1,
162
162
  tags: list[Any] | None = None,
163
- ) -> str:
163
+ ) -> dict | None:
164
164
  """
165
165
  Create a snapshot of a cluster.
166
166
 
@@ -74,7 +74,7 @@ class RedshiftSQLHook(DbApiHook):
74
74
 
75
75
  @cached_property
76
76
  def conn(self):
77
- return self.get_connection(self.redshift_conn_id) # type: ignore[attr-defined]
77
+ return self.get_connection(self.get_conn_id())
78
78
 
79
79
  def _get_conn_params(self) -> dict[str, str | int]:
80
80
  """Retrieve connection parameters."""
@@ -1565,15 +1565,14 @@ class S3Hook(AwsBaseHook):
1565
1565
  else:
1566
1566
  file = NamedTemporaryFile(dir=local_path, prefix="airflow_tmp_", delete=False) # type: ignore
1567
1567
 
1568
- with file:
1569
- extra_args = {**self.extra_args}
1570
- if self._requester_pays:
1571
- extra_args["RequestPayer"] = "requester"
1572
- s3_obj.download_fileobj(
1573
- file,
1574
- ExtraArgs=extra_args,
1575
- Config=self.transfer_config,
1576
- )
1568
+ extra_args = {**self.extra_args}
1569
+ if self._requester_pays:
1570
+ extra_args["RequestPayer"] = "requester"
1571
+ s3_obj.download_fileobj(
1572
+ file,
1573
+ ExtraArgs=extra_args,
1574
+ Config=self.transfer_config,
1575
+ )
1577
1576
  get_hook_lineage_collector().add_input_asset(
1578
1577
  context=self, scheme="s3", asset_kwargs={"bucket": bucket_name, "key": key}
1579
1578
  )
@@ -33,7 +33,6 @@ import watchtower
33
33
  from airflow.configuration import conf
34
34
  from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
35
35
  from airflow.providers.amazon.aws.utils import datetime_to_epoch_utc_ms
36
- from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
37
36
  from airflow.utils.log.file_task_handler import FileTaskHandler
38
37
  from airflow.utils.log.logging_mixin import LoggingMixin
39
38
 
@@ -170,15 +169,7 @@ class CloudWatchRemoteLogIO(LoggingMixin): # noqa: D101
170
169
  f"Reading remote log from Cloudwatch log_group: {self.log_group} log_stream: {relative_path}"
171
170
  ]
172
171
  try:
173
- if AIRFLOW_V_3_0_PLUS:
174
- from airflow.utils.log.file_task_handler import StructuredLogMessage
175
-
176
- logs = [
177
- StructuredLogMessage.model_validate(log)
178
- for log in self.get_cloudwatch_logs(relative_path, ti)
179
- ]
180
- else:
181
- logs = [self.get_cloudwatch_logs(relative_path, ti)] # type: ignore[arg-value]
172
+ logs = [self.get_cloudwatch_logs(relative_path, ti)]
182
173
  except Exception as e:
183
174
  logs = None
184
175
  messages.append(str(e))
@@ -206,8 +197,6 @@ class CloudWatchRemoteLogIO(LoggingMixin): # noqa: D101
206
197
  log_stream_name=stream_name,
207
198
  end_time=end_time,
208
199
  )
209
- if AIRFLOW_V_3_0_PLUS:
210
- return list(self._event_to_dict(e) for e in events)
211
200
  return "\n".join(self._event_to_str(event) for event in events)
212
201
 
213
202
  def _event_to_dict(self, event: dict) -> dict:
@@ -222,7 +211,8 @@ class CloudWatchRemoteLogIO(LoggingMixin): # noqa: D101
222
211
 
223
212
  def _event_to_str(self, event: dict) -> str:
224
213
  event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
225
- formatted_event_dt = event_dt.strftime("%Y-%m-%d %H:%M:%S,%f")[:-3]
214
+ # Format a datetime object to a string in Zulu time without milliseconds.
215
+ formatted_event_dt = event_dt.strftime("%Y-%m-%dT%H:%M:%SZ")
226
216
  message = event["message"]
227
217
  return f"[{formatted_event_dt}] {message}"
228
218
 
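
The `_event_to_str` change switches the rendered timestamp from a space-separated format with milliseconds to Zulu time without milliseconds. For example:

```python
from datetime import datetime, timezone

# A CloudWatch event carries an epoch-millisecond timestamp.
event = {"timestamp": 1700000000000, "message": "hello"}
event_dt = datetime.fromtimestamp(event["timestamp"] / 1000.0, tz=timezone.utc)
formatted = event_dt.strftime("%Y-%m-%dT%H:%M:%SZ")
print(f"[{formatted}] {event['message']}")  # [2023-11-14T22:13:20Z] hello
```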
@@ -49,7 +49,7 @@ class ChimeNotifier(BaseNotifier):
49
49
  """To reduce overhead cache the hook for the notifier."""
50
50
  return ChimeWebhookHook(chime_conn_id=self.chime_conn_id)
51
51
 
52
- def notify(self, context: Context) -> None: # type: ignore[override]
52
+ def notify(self, context: Context) -> None:
53
53
  """Send a message to a Chime Chat Room."""
54
54
  self.hook.send_message(message=self.message)
55
55
 
@@ -468,7 +468,7 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator, AwsBaseHookMixin[
468
468
  self.log.info("flow_name: %s", flow_name)
469
469
  af_client = self.hook.conn
470
470
  task_instance = kwargs["task_instance"]
471
- execution_id = task_instance.xcom_pull(task_ids=appflow_task_id, key="execution_id") # type: ignore
471
+ execution_id = task_instance.xcom_pull(task_ids=appflow_task_id, key="execution_id")
472
472
  if not execution_id:
473
473
  raise AirflowException(f"No execution_id found from task_id {appflow_task_id}!")
474
474
  self.log.info("execution_id: %s", execution_id)
@@ -494,5 +494,5 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator, AwsBaseHookMixin[
494
494
  raise AirflowException(f"Flow ({execution_id}) without recordsProcessed info!")
495
495
  records_processed = execution["recordsProcessed"]
496
496
  self.log.info("records_processed: %d", records_processed)
497
- task_instance.xcom_push("records_processed", records_processed) # type: ignore
497
+ task_instance.xcom_push("records_processed", records_processed)
498
498
  return records_processed > 0
@@ -239,7 +239,7 @@ class AthenaOperator(AwsBaseOperator[AthenaHook]):
239
239
  run_facets: dict[str, BaseFacet] = {}
240
240
  if parse_result.errors:
241
241
  run_facets["extractionError"] = ExtractionErrorRunFacet(
242
- totalTasks=len(self.query) if isinstance(self.query, list) else 1,
242
+ totalTasks=1,
243
243
  failedTasks=len(parse_result.errors),
244
244
  errors=[
245
245
  Error(
@@ -362,7 +362,7 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
362
362
  aws_domain=DataSyncTaskExecutionLink.get_aws_domain(self.hook.conn_partition),
363
363
  region_name=self.hook.conn_region_name,
364
364
  task_id=self.task_arn.split("/")[-1],
365
- task_execution_id=self.task_execution_arn.split("/")[-1], # type: ignore[union-attr]
365
+ task_execution_id=self.task_execution_arn.split("/")[-1],
366
366
  )
367
367
  DataSyncTaskExecutionLink.persist(
368
368
  context=context,
@@ -370,7 +370,7 @@ class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
370
370
  region_name=self.hook.conn_region_name,
371
371
  aws_partition=self.hook.conn_partition,
372
372
  task_id=self.task_arn.split("/")[-1],
373
- task_execution_id=self.task_execution_arn.split("/")[-1], # type: ignore[union-attr]
373
+ task_execution_id=self.task_execution_arn.split("/")[-1],
374
374
  )
375
375
 
376
376
  self.log.info("You can view this DataSync task execution at %s", execution_url)
@@ -26,8 +26,7 @@ from typing import TYPE_CHECKING, Any
26
26
  from airflow.configuration import conf
27
27
  from airflow.exceptions import AirflowException
28
28
  from airflow.providers.amazon.aws.exceptions import EcsOperatorError, EcsTaskFailToStart
29
- from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
30
- from airflow.providers.amazon.aws.hooks.ecs import EcsClusterStates, EcsHook, should_retry_eni
29
+ from airflow.providers.amazon.aws.hooks.ecs import EcsClusterStates, EcsHook
31
30
  from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
32
31
  from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
33
32
  from airflow.providers.amazon.aws.triggers.ecs import (
@@ -688,7 +687,6 @@ class EcsRunTaskOperator(EcsBaseOperator):
688
687
  logger=self.log,
689
688
  )
690
689
 
691
- @AwsBaseHook.retry(should_retry_eni)
692
690
  def _check_success_task(self) -> None:
693
691
  if not self.client or not self.arn:
694
692
  return
@@ -701,11 +699,6 @@ class EcsRunTaskOperator(EcsBaseOperator):
701
699
 
702
700
  for task in response["tasks"]:
703
701
  if task.get("stopCode", "") == "TaskFailedToStart":
704
- # Reset task arn here otherwise the retry run will not start
705
- # a new task but keep polling the old dead one
706
- # I'm not resetting it for other exceptions here because
707
- # EcsTaskFailToStart is the only exception that's being retried at the moment
708
- self.arn = None
709
702
  raise EcsTaskFailToStart(f"The task failed to start due to: {task.get('stoppedReason', '')}")
710
703
 
711
704
  # This is a `stoppedReason` that indicates a task has not
@@ -89,9 +89,9 @@ class EmrAddStepsOperator(AwsBaseOperator[EmrHook]):
89
89
  :param steps: boto3 style steps or reference to a steps file (must be '.json') to
90
90
  be added to the jobflow. (templated)
91
91
  :param wait_for_completion: If True, the operator will wait for all the steps to be completed.
92
+ Defaults to False. Note: When deferrable=True, this parameter will not take effect.
92
93
  :param execution_role_arn: The ARN of the runtime role for a step on the cluster.
93
94
  :param do_xcom_push: if True, job_flow_id is pushed to XCom with key job_flow_id.
94
- :param wait_for_completion: Whether to wait for job run completion. (default: True)
95
95
  :param deferrable: If True, the operator will wait asynchronously for the job to complete.
96
96
  This implies waiting for completion. This mode requires aiobotocore module to be installed.
97
97
  (default: False)
@@ -970,7 +970,7 @@ class EmrServerlessCreateApplicationOperator(AwsBaseOperator[EmrServerlessHook])
970
970
 
971
971
  :param release_label: The EMR release version associated with the application.
972
972
  :param job_type: The type of application you want to start, such as Spark or Hive.
973
- :param wait_for_completion: If true, wait for the Application to start before returning. Default to True.
973
+ :param wait_for_completion: If true, wait for the Application to start before returning. Defaults to True.
974
974
  If set to False, ``waiter_max_attempts`` and ``waiter_delay`` will only be applied when
975
975
  waiting for the application to be in the ``CREATED`` state.
976
976
  :param client_request_token: The client idempotency token of the application to create.
@@ -985,8 +985,9 @@ class EmrServerlessCreateApplicationOperator(AwsBaseOperator[EmrServerlessHook])
985
985
  :param verify: Whether or not to verify SSL certificates. See:
986
986
  https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
987
987
  :param waiter_max_attempts: Number of times the waiter should poll the application to check the state.
988
- If not set, the waiter will use its default value.
988
+ Defaults to 25 if not set.
989
989
  :param waiter_delay: Number of seconds between polling the state of the application.
990
+ Defaults to 60 seconds if not set.
990
991
  :param deferrable: If True, the operator will wait asynchronously for application to be created.
991
992
  This implies waiting for completion. This mode requires aiobotocore module to be installed.
992
993
  (default: False, but can be overridden in config file by setting default_deferrable to True)
@@ -1117,8 +1118,8 @@ class EmrServerlessStartJobOperator(AwsBaseOperator[EmrServerlessHook]):
1117
1118
  Its value must be unique for each request.
1118
1119
  :param config: Optional dictionary for arbitrary parameters to the boto API start_job_run call.
1119
1120
  :param wait_for_completion: If true, waits for the job to start before returning. Defaults to True.
1120
- If set to False, ``waiter_countdown`` and ``waiter_check_interval_seconds`` will only be applied
1121
- when waiting for the application be to in the ``STARTED`` state.
1121
+ If set to False, ``waiter_max_attempts`` and ``waiter_delay`` will only be applied
1122
+ when waiting for the application to be in the ``STARTED`` state.
1122
1123
  :param aws_conn_id: The Airflow connection used for AWS credentials.
1123
1124
  If this is ``None`` or empty then the default boto3 behaviour is used. If
1124
1125
  running Airflow in a distributed manner and aws_conn_id is None or
@@ -1129,8 +1130,9 @@ class EmrServerlessStartJobOperator(AwsBaseOperator[EmrServerlessHook]):
1129
1130
  https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
1130
1131
  :param name: Name for the EMR Serverless job. If not provided, a default name will be assigned.
1131
1132
  :param waiter_max_attempts: Number of times the waiter should poll the application to check the state.
1132
- If not set, the waiter will use its default value.
1133
+ Defaults to 25 if not set.
1133
1134
  :param waiter_delay: Number of seconds between polling the state of the job run.
1135
+ Defaults to 60 seconds if not set.
1134
1136
  :param deferrable: If True, the operator will wait asynchronously for the crawl to complete.
1135
1137
  This implies waiting for completion. This mode requires aiobotocore module to be installed.
1136
1138
  (default: False, but can be overridden in config file by setting default_deferrable to True)
@@ -1439,9 +1441,9 @@ class EmrServerlessStopApplicationOperator(AwsBaseOperator[EmrServerlessHook]):
1439
1441
  If you want to wait for the jobs to finish gracefully, use
1440
1442
  :class:`airflow.providers.amazon.aws.sensors.emr.EmrServerlessJobSensor`
1441
1443
  :param waiter_max_attempts: Number of times the waiter should poll the application to check the state.
1442
- Default is 25.
1444
+ Defaults to 25 if not set.
1443
1445
  :param waiter_delay: Number of seconds between polling the state of the application.
1444
- Default is 60 seconds.
1446
+ Defaults to 60 seconds if not set.
1445
1447
  :param deferrable: If True, the operator will wait asynchronously for the application to stop.
1446
1448
  This implies waiting for completion. This mode requires aiobotocore module to be installed.
1447
1449
  (default: False, but can be overridden in config file by setting default_deferrable to True)
@@ -1574,9 +1576,9 @@ class EmrServerlessDeleteApplicationOperator(EmrServerlessStopApplicationOperato
1574
1576
  :param verify: Whether or not to verify SSL certificates. See:
1575
1577
  https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
1576
1578
  :param waiter_max_attempts: Number of times the waiter should poll the application to check the state.
1577
- Defaults to 25.
1579
+ Defaults to 25 if not set.
1578
1580
  :param waiter_delay: Number of seconds between polling the state of the application.
1579
- Defaults to 60 seconds.
1581
+ Defaults to 60 seconds if not set.
1580
1582
  :param deferrable: If True, the operator will wait asynchronously for application to be deleted.
1581
1583
  This implies waiting for completion. This mode requires aiobotocore module to be installed.
1582
1584
  (default: False, but can be overridden in config file by setting default_deferrable to True)
@@ -86,7 +86,7 @@ class SageMakerBaseOperator(AwsBaseOperator[SageMakerHook]):
86
86
  super().__init__(**kwargs)
87
87
  self.config = config
88
88
 
89
- def parse_integer(self, config: dict, field: list[str] | str) -> None:
89
+ def parse_integer(self, config: dict | list, field: list[str] | str) -> None:
90
90
  """Recursive method for parsing string fields holding integer values to integers."""
91
91
  if len(field) == 1:
92
92
  if isinstance(config, list):
@@ -993,7 +993,7 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
993
993
  )
994
994
  if response["ResponseMetadata"]["HTTPStatusCode"] != 200:
995
995
  raise AirflowException(f"Sagemaker Tuning Job creation failed: {response}")
996
-
996
+ description: dict = {}
997
997
  if self.deferrable:
998
998
  self.defer(
999
999
  trigger=SageMakerTrigger(
@@ -1009,7 +1009,6 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
1009
1009
  else None
1010
1010
  ),
1011
1011
  )
1012
- description = {} # never executed but makes static checkers happy
1013
1012
  elif self.wait_for_completion:
1014
1013
  description = self.hook.check_status(
1015
1014
  self.config["HyperParameterTuningJobName"],
@@ -0,0 +1,131 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ from __future__ import annotations
18
+
19
+ from collections.abc import Sequence
20
+ from typing import TYPE_CHECKING, Any
21
+
22
+ from airflow.configuration import conf
23
+ from airflow.exceptions import AirflowException
24
+ from airflow.providers.amazon.aws.hooks.ssm import SsmHook
25
+ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
26
+ from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
27
+ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
28
+ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
+
30
+ if TYPE_CHECKING:
31
+ from airflow.utils.context import Context
32
+
33
+
34
+ class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
35
+ """
36
+ Executes the SSM Run Command to perform actions on managed instances.
37
+
38
+ .. seealso::
39
+ For more information on how to use this operator, take a look at the guide:
40
+ :ref:`howto/operator:SsmRunCommandOperator`
41
+
42
+ :param document_name: The name of the Amazon Web Services Systems Manager document (SSM document) to run.
43
+ :param run_command_kwargs: Optional parameters to pass to the send_command API.
44
+
45
+ :param wait_for_completion: Whether to wait for cluster to stop. (default: True)
46
+ :param waiter_delay: Time in seconds to wait between status checks. (default: 120)
47
+ :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 75)
48
+ :param deferrable: If True, the operator will wait asynchronously for the cluster to stop.
49
+ This implies waiting for completion. This mode requires aiobotocore module to be installed.
50
+ (default: False)
51
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
52
+ If this is ``None`` or empty then the default boto3 behaviour is used. If
53
+ running Airflow in a distributed manner and aws_conn_id is None or
54
+ empty, then default boto3 configuration would be used (and must be
55
+ maintained on each worker node).
56
+ :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
57
+ :param verify: Whether or not to verify SSL certificates. See:
58
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
59
+ :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
60
+ https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
61
+ """
62
+
63
+ aws_hook_class = SsmHook
64
+ template_fields: Sequence[str] = aws_template_fields(
65
+ "document_name",
66
+ "run_command_kwargs",
67
+ )
68
+
69
+ def __init__(
70
+ self,
71
+ *,
72
+ document_name: str,
73
+ run_command_kwargs: dict[str, Any] | None = None,
74
+ wait_for_completion: bool = True,
75
+ waiter_delay: int = 120,
76
+ waiter_max_attempts: int = 75,
77
+ deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
78
+ **kwargs,
79
+ ):
80
+ super().__init__(**kwargs)
81
+ self.wait_for_completion = wait_for_completion
82
+ self.waiter_delay = waiter_delay
83
+ self.waiter_max_attempts = waiter_max_attempts
84
+ self.deferrable = deferrable
85
+
86
+ self.document_name = document_name
87
+ self.run_command_kwargs = run_command_kwargs or {}
88
+
89
+ def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
90
+ event = validate_execute_complete_event(event)
91
+
92
+ if event["status"] != "success":
93
+ raise AirflowException(f"Error while running run command: {event}")
94
+
95
+ self.log.info("SSM run command `%s` completed.", event["command_id"])
96
+ return event["command_id"]
97
+
98
+ def execute(self, context: Context):
99
+ response = self.hook.conn.send_command(
100
+ DocumentName=self.document_name,
101
+ **self.run_command_kwargs,
102
+ )
103
+
104
+ command_id = response["Command"]["CommandId"]
105
+ task_description = f"SSM run command {command_id} to complete."
106
+
107
+ if self.deferrable:
108
+ self.log.info("Deferring for %s", task_description)
109
+ self.defer(
110
+ trigger=SsmRunCommandTrigger(
111
+ command_id=command_id,
112
+ waiter_delay=self.waiter_delay,
113
+ waiter_max_attempts=self.waiter_max_attempts,
114
+ aws_conn_id=self.aws_conn_id,
115
+ ),
116
+ method_name="execute_complete",
117
+ )
118
+
119
+ elif self.wait_for_completion:
120
+ self.log.info("Waiting for %s", task_description)
121
+ waiter = self.hook.get_waiter("command_executed")
122
+
123
+ instance_ids = response["Command"]["InstanceIds"]
124
+ for instance_id in instance_ids:
125
+ waiter.wait(
126
+ CommandId=command_id,
127
+ InstanceId=instance_id,
128
+ WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
129
+ )
130
+
131
+ return command_id
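
A hypothetical usage sketch for the new `SsmRunCommandOperator`; the DAG id, document name, instance id, and command are placeholders, and `run_command_kwargs` is passed straight through to the SSM `send_command` API:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.ssm import SsmRunCommandOperator

with DAG(dag_id="example_ssm_run_command", start_date=datetime(2025, 1, 1), schedule=None):
    run_command = SsmRunCommandOperator(
        task_id="run_shell_script",
        document_name="AWS-RunShellScript",  # placeholder SSM document
        run_command_kwargs={
            "InstanceIds": ["i-0123456789abcdef0"],  # placeholder instance
            "Parameters": {"commands": ["echo hello"]},
        },
    )
```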
@@ -0,0 +1,127 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from __future__ import annotations
19
+
20
+ from collections.abc import Sequence
21
+ from typing import TYPE_CHECKING, Any
22
+
23
+ from airflow.configuration import conf
24
+ from airflow.exceptions import AirflowException
25
+ from airflow.providers.amazon.aws.hooks.ssm import SsmHook
26
+ from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
27
+ from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
28
+ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
29
+ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
+
31
+ if TYPE_CHECKING:
32
+ from airflow.utils.context import Context
33
+
34
+
35
+ class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
36
+ """
37
+ Poll the state of an AWS SSM Run Command until all instance jobs reach a terminal state. Fails if any instance job ends in a failed state.
38
+
39
+ .. seealso::
40
+ For more information on how to use this sensor, take a look at the guide:
41
+ :ref:`howto/sensor:SsmRunCommandCompletedSensor`
42
+
43
+ :param command_id: The ID of the AWS SSM Run Command.
44
+
45
+ :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
46
+ module to be installed.
47
+ (default: False, but can be overridden in config file by setting default_deferrable to True)
48
+ :param poke_interval: Polling period in seconds to check for the status of the job. (default: 120)
49
+ :param max_retries: Number of times before returning the current state. (default: 75)
50
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
51
+ If this is ``None`` or empty then the default boto3 behaviour is used. If
52
+ running Airflow in a distributed manner and aws_conn_id is None or
53
+ empty, then default boto3 configuration would be used (and must be
54
+ maintained on each worker node).
55
+ :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
56
+ :param verify: Whether or not to verify SSL certificates. See:
57
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
58
+ :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
59
+ https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
60
+ """
61
+
62
+ INTERMEDIATE_STATES: tuple[str, ...] = ("Pending", "Delayed", "InProgress", "Cancelling")
63
+ FAILURE_STATES: tuple[str, ...] = ("Cancelled", "TimedOut", "Failed")
64
+ SUCCESS_STATES: tuple[str, ...] = ("Success",)
65
+ FAILURE_MESSAGE = "SSM run command sensor failed."
66
+
67
+ aws_hook_class = SsmHook
68
+ template_fields: Sequence[str] = aws_template_fields(
69
+ "command_id",
70
+ )
71
+
72
+ def __init__(
73
+ self,
74
+ *,
75
+ command_id,
76
+ deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
77
+ poke_interval: int = 120,
78
+ max_retries: int = 75,
79
+ **kwargs,
80
+ ):
81
+ super().__init__(**kwargs)
82
+ self.command_id = command_id
83
+ self.deferrable = deferrable
84
+ self.poke_interval = poke_interval
85
+ self.max_retries = max_retries
86
+
87
+ def poke(self, context: Context):
88
+ response = self.hook.conn.list_command_invocations(CommandId=self.command_id)
89
+ command_invocations = response.get("CommandInvocations", [])
90
+
91
+ if not command_invocations:
92
+ self.log.info("No command invocations found for command_id=%s yet, waiting...", self.command_id)
93
+ return False
94
+
95
+ for invocation in command_invocations:
96
+ state = invocation["Status"]
97
+
98
+ if state in self.FAILURE_STATES:
99
+ raise AirflowException(self.FAILURE_MESSAGE)
100
+
101
+ if state in self.INTERMEDIATE_STATES:
102
+ return False
103
+
104
+ return True
105
+
106
+ def execute(self, context: Context):
107
+ if self.deferrable:
108
+ self.defer(
109
+ trigger=SsmRunCommandTrigger(
110
+ command_id=self.command_id,
111
+ waiter_delay=int(self.poke_interval),
112
+ waiter_max_attempts=self.max_retries,
113
+ aws_conn_id=self.aws_conn_id,
114
+ ),
115
+ method_name="execute_complete",
116
+ )
117
+
118
+ else:
119
+ super().execute(context=context)
120
+
121
+ def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
122
+ event = validate_execute_complete_event(event)
123
+
124
+ if event["status"] != "success":
125
+ raise AirflowException(f"Error while running run command: {event}")
126
+
127
+ self.log.info("SSM run command `%s` completed.", event["command_id"])
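
A matching sketch for the new `SsmRunCommandCompletedSensor`; `command_id` is templated here from the XCom pushed by a preceding `SsmRunCommandOperator` task (the task id is a placeholder):

```python
from airflow.providers.amazon.aws.sensors.ssm import SsmRunCommandCompletedSensor

wait_for_command = SsmRunCommandCompletedSensor(
    task_id="wait_for_run_command",
    # command_id is a template field; pull it from the operator's XCom.
    command_id="{{ ti.xcom_pull(task_ids='run_shell_script') }}",
)
```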
@@ -69,6 +69,7 @@ class SqlToS3Operator(BaseOperator):
69
69
  :param sql_hook_params: Extra config params to be passed to the underlying hook.
70
70
  Should match the desired hook constructor params.
71
71
  :param parameters: (optional) the parameters to render the SQL query with.
72
+ :param read_pd_kwargs: arguments to include in DataFrame when ``pd.read_sql()`` is called.
72
73
  :param aws_conn_id: reference to a specific S3 connection
73
74
  :param verify: Whether or not to verify SSL certificates for S3 connection.
74
75
  By default SSL certificates are verified.
@@ -97,6 +98,7 @@ class SqlToS3Operator(BaseOperator):
97
98
  template_fields_renderers = {
98
99
  "query": "sql",
99
100
  "pd_kwargs": "json",
101
+ "read_pd_kwargs": "json",
100
102
  }
101
103
 
102
104
  def __init__(
@@ -108,6 +110,7 @@ class SqlToS3Operator(BaseOperator):
108
110
  sql_conn_id: str,
109
111
  sql_hook_params: dict | None = None,
110
112
  parameters: None | Mapping[str, Any] | list | tuple = None,
113
+ read_pd_kwargs: dict | None = None,
111
114
  replace: bool = False,
112
115
  aws_conn_id: str | None = "aws_default",
113
116
  verify: bool | str | None = None,
@@ -127,6 +130,7 @@ class SqlToS3Operator(BaseOperator):
127
130
  self.replace = replace
128
131
  self.pd_kwargs = pd_kwargs or {}
129
132
  self.parameters = parameters
133
+ self.read_pd_kwargs = read_pd_kwargs or {}
130
134
  self.max_rows_per_file = max_rows_per_file
131
135
  self.groupby_kwargs = groupby_kwargs or {}
132
136
  self.sql_hook_params = sql_hook_params
@@ -161,7 +165,7 @@ class SqlToS3Operator(BaseOperator):
161
165
  raise AirflowOptionalProviderFeatureException(e)
162
166
 
163
167
  for col in df:
164
- if df[col].dtype.name == "object" and file_format == "parquet":
168
+ if df[col].dtype.name == "object" and file_format == FILE_FORMAT.PARQUET:
165
169
  # if the type wasn't identified or converted, change it to a string so if can still be
166
170
  # processed.
167
171
  df[col] = df[col].astype(str)
@@ -185,9 +189,12 @@ class SqlToS3Operator(BaseOperator):
185
189
  def execute(self, context: Context) -> None:
186
190
  sql_hook = self._get_hook()
187
191
  s3_conn = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
188
- data_df = sql_hook.get_df(sql=self.query, parameters=self.parameters, df_type="pandas")
192
+ data_df = sql_hook.get_df(
193
+ sql=self.query, parameters=self.parameters, df_type="pandas", **self.read_pd_kwargs
194
+ )
189
195
  self.log.info("Data from SQL obtained")
190
- self._fix_dtypes(data_df, self.file_format)
196
+ if ("dtype_backend", "pyarrow") not in self.read_pd_kwargs.items():
197
+ self._fix_dtypes(data_df, self.file_format)
191
198
  file_options = FILE_OPTIONS_MAP[self.file_format]
192
199
 
193
200
  for group_name, df in self._partition_dataframe(df=data_df):
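
A hypothetical sketch of the new `read_pd_kwargs` parameter on `SqlToS3Operator`; the connection id, query, bucket, and key are placeholders. The kwargs are forwarded to the hook's `get_df()` call, and with `dtype_backend="pyarrow"` the operator skips its own dtype fix-up, as the hunk above shows:

```python
from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator

sql_to_s3 = SqlToS3Operator(
    task_id="sql_to_s3",
    sql_conn_id="my_sql_conn",  # placeholder connection
    query="SELECT * FROM example_table",
    s3_bucket="example-bucket",
    s3_key="exports/example.parquet",
    file_format="parquet",
    read_pd_kwargs={"dtype_backend": "pyarrow"},
)
```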
@@ -54,7 +54,7 @@ class OpenSearchServerlessCollectionActiveTrigger(AwsBaseWaiterTrigger):
54
54
  serialized_fields={"collection_id": collection_id, "collection_name": collection_name},
55
55
  waiter_name="collection_available",
56
56
  # waiter_args is a dict[str, Any], allow a possible list of None (it is caught above)
57
- waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]}, # type: ignore[list-item]
57
+ waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]},
58
58
  failure_message="OpenSearch Serverless Collection creation failed.",
59
59
  status_message="Status of OpenSearch Serverless Collection is",
60
60
  status_queries=["status"],
@@ -0,0 +1,86 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ from __future__ import annotations
18
+
19
+ from collections.abc import AsyncIterator
20
+ from typing import TYPE_CHECKING
21
+
22
+ from airflow.providers.amazon.aws.hooks.ssm import SsmHook
23
+ from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
24
+ from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
25
+ from airflow.triggers.base import TriggerEvent
26
+
27
+ if TYPE_CHECKING:
28
+ from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
29
+
30
+
31
+ class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
32
+ """
33
+ Trigger when a SSM run command is complete.
34
+
35
+ :param command_id: The ID of the AWS SSM Run Command.
36
+ :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
37
+ :param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
38
+ :param aws_conn_id: The Airflow connection used for AWS credentials.
39
+ """
40
+
41
+ def __init__(
42
+ self,
43
+ *,
44
+ command_id: str,
45
+ waiter_delay: int = 120,
46
+ waiter_max_attempts: int = 75,
47
+ aws_conn_id: str | None = None,
48
+ ) -> None:
49
+ super().__init__(
50
+ serialized_fields={"command_id": command_id},
51
+ waiter_name="command_executed",
52
+ waiter_args={"CommandId": command_id},
53
+ failure_message="SSM run command failed.",
54
+ status_message="Status of SSM run command is",
55
+ status_queries=["status"],
56
+ return_key="command_id",
57
+ return_value=command_id,
58
+ waiter_delay=waiter_delay,
59
+ waiter_max_attempts=waiter_max_attempts,
60
+ aws_conn_id=aws_conn_id,
61
+ )
62
+ self.command_id = command_id
63
+
64
+ def hook(self) -> AwsGenericHook:
65
+ return SsmHook(aws_conn_id=self.aws_conn_id)
66
+
67
+ async def run(self) -> AsyncIterator[TriggerEvent]:
68
+ hook = self.hook()
69
+ async with hook.async_conn as client:
70
+ response = client.list_command_invocations(CommandId=self.command_id)
71
+ instance_ids = [invocation["InstanceId"] for invocation in response.get("CommandInvocations", [])]
72
+ waiter = hook.get_waiter(self.waiter_name, deferrable=True, client=client)
73
+
74
+ for instance_id in instance_ids:
75
+ self.waiter_args["InstanceId"] = instance_id
76
+ await async_wait(
77
+ waiter,
78
+ self.waiter_delay,
79
+ self.attempts,
80
+ self.waiter_args,
81
+ self.failure_message,
82
+ self.status_message,
83
+ self.status_queries,
84
+ )
85
+
86
+ yield TriggerEvent({"status": "success", self.return_key: self.return_value})
@@ -30,9 +30,7 @@ from __future__ import annotations
30
30
  import logging
31
31
  from collections.abc import Callable
32
32
  from functools import wraps
33
- from typing import TypeVar
34
-
35
- from airflow.typing_compat import ParamSpec
33
+ from typing import ParamSpec, TypeVar
36
34
 
37
35
  PS = ParamSpec("PS")
38
36
  RT = TypeVar("RT")
@@ -250,6 +250,7 @@ def get_provider_info():
250
250
  "integration-name": "Amazon Systems Manager (SSM)",
251
251
  "external-doc-url": "https://aws.amazon.com/systems-manager/",
252
252
  "logo": "/docs/integration-logos/AWS-Systems-Manager_light-bg@4x.png",
253
+ "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/ssm.rst"],
253
254
  "tags": ["aws"],
254
255
  },
255
256
  {
@@ -444,6 +445,10 @@ def get_provider_info():
444
445
  "integration-name": "AWS Step Functions",
445
446
  "python-modules": ["airflow.providers.amazon.aws.operators.step_function"],
446
447
  },
448
+ {
449
+ "integration-name": "Amazon Systems Manager (SSM)",
450
+ "python-modules": ["airflow.providers.amazon.aws.operators.ssm"],
451
+ },
447
452
  {
448
453
  "integration-name": "Amazon RDS",
449
454
  "python-modules": ["airflow.providers.amazon.aws.operators.rds"],
@@ -581,6 +586,10 @@ def get_provider_info():
581
586
  "integration-name": "AWS Step Functions",
582
587
  "python-modules": ["airflow.providers.amazon.aws.sensors.step_function"],
583
588
  },
589
+ {
590
+ "integration-name": "Amazon Systems Manager (SSM)",
591
+ "python-modules": ["airflow.providers.amazon.aws.sensors.ssm"],
592
+ },
584
593
  {
585
594
  "integration-name": "Amazon QuickSight",
586
595
  "python-modules": ["airflow.providers.amazon.aws.sensors.quicksight"],
@@ -846,6 +855,10 @@ def get_provider_info():
846
855
  "integration-name": "Amazon Simple Storage Service (S3)",
847
856
  "python-modules": ["airflow.providers.amazon.aws.triggers.s3"],
848
857
  },
858
+ {
859
+ "integration-name": "Amazon Systems Manager (SSM)",
860
+ "python-modules": ["airflow.providers.amazon.aws.triggers.ssm"],
861
+ },
849
862
  {
850
863
  "integration-name": "Amazon EMR",
851
864
  "python-modules": ["airflow.providers.amazon.aws.triggers.emr"],
@@ -44,7 +44,7 @@ if AIRFLOW_V_3_0_PLUS:
44
44
  from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
45
45
  from airflow.sdk.execution_time.xcom import XCom
46
46
  else:
47
- from airflow.models import BaseOperator, XCom # type: ignore[no-redef]
47
+ from airflow.models import BaseOperator, XCom
48
48
  from airflow.models.baseoperatorlink import BaseOperatorLink # type: ignore[no-redef]
49
49
  from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
50
50
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-amazon
3
- Version: 9.10.0rc1
3
+ Version: 9.11.0rc1
4
4
  Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
5
5
  Keywords: airflow-provider,amazon,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -18,6 +18,7 @@ Classifier: License :: OSI Approved :: Apache Software License
18
18
  Classifier: Programming Language :: Python :: 3.10
19
19
  Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
21
22
  Classifier: Topic :: System :: Monitoring
22
23
  Requires-Dist: apache-airflow>=2.10.0rc1
23
24
  Requires-Dist: apache-airflow-providers-common-compat>=1.6.1rc1
@@ -39,23 +40,23 @@ Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
39
40
  Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc1 ; extra == "cncf-kubernetes"
40
41
  Requires-Dist: apache-airflow-providers-common-messaging>=1.0.1rc1 ; extra == "common-messaging"
41
42
  Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
42
- Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
43
+ Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
43
44
  Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
44
45
  Requires-Dist: apache-airflow-providers-google ; extra == "google"
45
46
  Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
46
47
  Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
47
48
  Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
48
49
  Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
49
- Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml"
50
- Requires-Dist: xmlsec!=1.3.15,>=1.3.14 ; extra == "python3-saml"
51
- Requires-Dist: lxml<5.4.0,>=5.3.2 ; extra == "python3-saml"
50
+ Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml" and ( python_version < '3.13')
51
+ Requires-Dist: xmlsec>=1.3.14 ; extra == "python3-saml" and ( python_version < '3.13')
52
+ Requires-Dist: lxml>=6.0.0 ; extra == "python3-saml" and ( python_version < '3.13')
52
53
  Requires-Dist: s3fs>=2023.10.0 ; extra == "s3fs"
53
54
  Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
54
55
  Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
55
56
  Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
56
57
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
57
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.10.0/changelog.html
58
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.10.0
58
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.11.0/changelog.html
59
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.11.0
59
60
  Project-URL: Mastodon, https://fosstodon.org/@airflow
60
61
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
61
62
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -103,8 +104,9 @@ Provides-Extra: standard
103
104
 
104
105
  Package ``apache-airflow-providers-amazon``
105
106
 
106
- Release: ``9.10.0``
107
+ Release: ``9.11.0``
107
108
 
109
+ Release Date: ``|PypiReleaseDate|``
108
110
 
109
111
  Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
110
112
 
@@ -116,7 +118,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
116
118
  are in ``airflow.providers.amazon`` python package.
117
119
 
118
120
  You can find package information and changelog for the provider
119
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.10.0/>`_.
121
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.11.0/>`_.
120
122
 
121
123
  Installation
122
124
  ------------
@@ -125,7 +127,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
125
127
  for the minimum Airflow version supported) via
126
128
  ``pip install apache-airflow-providers-amazon``
127
129
 
128
- The package supports the following python versions: 3.10,3.11,3.12
130
+ The package supports the following python versions: 3.10,3.11,3.12,3.13
129
131
 
130
132
  Requirements
131
133
  ------------
@@ -184,5 +186,5 @@ Dependent package
184
186
  ======================================================================================================================== ====================
185
187
 
186
188
  The changelog for the provider package can be found in the
187
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.10.0/changelog.html>`_.
189
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.11.0/changelog.html>`_.
188
190
 
@@ -1,7 +1,7 @@
1
1
  airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/amazon/__init__.py,sha256=iSLz0UFeGo5Z6gcCXR4EgRzkhPiEW_Ztl-UsBA-puG0,1496
3
- airflow/providers/amazon/get_provider_info.py,sha256=TV5uNdxIE2kTx-CJJAkfApueVYUXBscm8jdwA8MQYV8,72655
4
- airflow/providers/amazon/version_compat.py,sha256=7Czyw4hDHy79lHEc0n01cv1Q8sAMF9xQSxDWnkmgh4M,2335
2
+ airflow/providers/amazon/__init__.py,sha256=jBnxUQuGC6nq67_O7qB3iK1Ir2z6m83935PsA7hd03U,1496
3
+ airflow/providers/amazon/get_provider_info.py,sha256=HqgOY-2XbaX7Nhb11ySGgUIrQJ_C8tBWRx9b6XO32zg,73282
4
+ airflow/providers/amazon/version_compat.py,sha256=8biVK8TSccWSZKPfRoA5w9N9R6YznPWPq8RALrVDWuY,2309
5
5
  airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
6
6
  airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70J-X8n6fsx5Jog,1864
7
7
  airflow/providers/amazon/aws/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -26,7 +26,7 @@ airflow/providers/amazon/aws/bundles/s3.py,sha256=I9vjYnn5kQyOlwMcANTxHeH79zGzoc
26
26
  airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
27
27
  airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
28
28
  airflow/providers/amazon/aws/executors/aws_lambda/__init__.py,sha256=1PebDNZ6KXaXd3Zojp8lhULD6Elk-Pi_NiK3qi4G45s,950
29
- airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py,sha256=WGQZ1fo3v2ghMgpog4DZYrauhCDKUfEmKaD4jrPGODo,21304
29
+ airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py,sha256=CUvpOtpezfNRjUj4ZNLf1yYKSd0DZ9DUB-v-bbyiy4Q,23030
30
30
  airflow/providers/amazon/aws/executors/aws_lambda/utils.py,sha256=6Shcr1_kMxQK9-IslzMbTK_O526PF9D2Z5CGyUrA4sA,2255
31
31
  airflow/providers/amazon/aws/executors/aws_lambda/docker/Dockerfile,sha256=_Oy_AHxEM-_BwtaL0iwWwD8Lm2RFSFGCBsiBUzzM7Dg,5043
32
32
  airflow/providers/amazon/aws/executors/aws_lambda/docker/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -50,7 +50,7 @@ airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
50
50
  airflow/providers/amazon/aws/hooks/appflow.py,sha256=-le6RsIMWIqTav7KGknsph9Td42znSm_eIYztxc_RsE,5263
51
51
  airflow/providers/amazon/aws/hooks/athena.py,sha256=ubpgNafkIRzFh1h7Bexhj4L83SLvyZf-yDcmGmAIVt8,13463
52
52
  airflow/providers/amazon/aws/hooks/athena_sql.py,sha256=0LzWXpmUYVphkuhnH5dUgnopmuAAvjkC0nAFq-C0Xk8,7201
53
- airflow/providers/amazon/aws/hooks/base_aws.py,sha256=vwHLkdL-o99CmNNNh2VRafQ7zw8wTxsTlgPTS2a-1ig,46851
53
+ airflow/providers/amazon/aws/hooks/base_aws.py,sha256=8e_fvEQlDq7btwL-cijJytm4O32lXTDP-QP8QDCO7wA,46719
54
54
  airflow/providers/amazon/aws/hooks/batch_client.py,sha256=0FGUSCtcgvuO6A8oKNykZ6T8ZZGQav4OP-F2PcrIw7o,22016
55
55
  airflow/providers/amazon/aws/hooks/batch_waiters.json,sha256=eoN5YDgeTNZ2Xz17TrbKBPhd7z9-6KD3RhaDKXXOvqU,2511
56
56
  airflow/providers/amazon/aws/hooks/batch_waiters.py,sha256=V-ZvO6cAAVAJqOHx8aU5QEzaCYnPnCC8jBWLESi9-qs,10981
@@ -61,10 +61,10 @@ airflow/providers/amazon/aws/hooks/comprehend.py,sha256=Xggr7GCReowgTAVWNXboFSGm
61
61
  airflow/providers/amazon/aws/hooks/datasync.py,sha256=RcYRlpP7jsUzTUJDOEVYnfTkm5I7exkoMTyB622AZsA,13616
62
62
  airflow/providers/amazon/aws/hooks/dms.py,sha256=zR8zMuR_uR1_38meyDE9sATq8cZKvzCnFDuTlQykp7g,14398
63
63
  airflow/providers/amazon/aws/hooks/dynamodb.py,sha256=gS0KlQFvHh74DEt3gyQaGdXcb0yT6SXaxO1JFhPe1sI,3997
64
- airflow/providers/amazon/aws/hooks/ec2.py,sha256=UXKoZHwiukngu1eKx7vssUv3TSnjxnoGoU2uDbMEsp8,8127
64
+ airflow/providers/amazon/aws/hooks/ec2.py,sha256=M5XIGQ2LamXS2JQlHF_pMCdUWUmFmA8uDVlCcI-vTlo,8094
65
65
  airflow/providers/amazon/aws/hooks/ecr.py,sha256=vTPUVCEz1x2DHA6-N0bbOcRXxP8JblXq_r0II5wtDyE,4015
66
- airflow/providers/amazon/aws/hooks/ecs.py,sha256=I8xmqa8X1kakh5QpaIT55qRfihnJCjzWPm1dmPxXrVU,6687
- airflow/providers/amazon/aws/hooks/eks.py,sha256=aVWDwBiDuP1TSg5wQD9N_VW_YvDUru3aPYZpOexjVT4,25117
+ airflow/providers/amazon/aws/hooks/ecs.py,sha256=9HMiR21Mj4PAeLMHyY87rI4WstNqRqRwJAicAGQiEHQ,6307
+ airflow/providers/amazon/aws/hooks/eks.py,sha256=DJOxMZi05Rynv_SYCTeyUkrAfj3qW_qnyVUT979w7aI,25118
  airflow/providers/amazon/aws/hooks/elasticache_replication_group.py,sha256=x6kkaR2nzDF8w1kqolbaS3-XCbHl5qlJMcpGYmlsxuU,12089
  airflow/providers/amazon/aws/hooks/emr.py,sha256=B8vNVLOFCkCEXnongySjcFH2fqnK3sBN8d6LbJrAYNA,22240
  airflow/providers/amazon/aws/hooks/eventbridge.py,sha256=dSaKbFB8ueOUJGl6YLIz70zXy0Xzr3yMflKS2wGFDSM,3364
@@ -82,10 +82,10 @@ airflow/providers/amazon/aws/hooks/neptune.py,sha256=a3r26msR8U5oCTMHQYqA-2OspVO
  airflow/providers/amazon/aws/hooks/opensearch_serverless.py,sha256=0zFRXXjlbQRCTt5D_q1FCp965FC8LyOhMRk2x6nvsIc,1543
  airflow/providers/amazon/aws/hooks/quicksight.py,sha256=2Am_K-BcoqcfuWwLbWjW1LsbZpGskK2bV-uHT2diu1o,7347
  airflow/providers/amazon/aws/hooks/rds.py,sha256=bAcaGeP7uNN0lp_FZtIPlt2JCZxcTEr8E7u5o8UbVYk,15238
- airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=hyX_ldjn_gNOehcPV-3M3wzO1HdIdujG8JjrUZmCnNg,7962
+ airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=FTpUkyLCM_aRCIApVkFNN-AQPHVDtODVzJyd1cuIl8c,7992
  airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=JxyXEyFeJHUtMxjjtMlCMJSW9P-cnixISd3R4Ob7fy8,11841
- airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=MzFW0erCvc3pJ-_1PPrp-H3gYcwRpMvY31xx09GvQts,10980
- airflow/providers/amazon/aws/hooks/s3.py,sha256=EB5pN_091fOdJYlljqT5ETKwjknlOtP-cC6Jc6F8ycI,67760
+ airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=gpI1q9KK-mkewigehTegIhWJKrAQnQu1WedDfapx6gU,10947
+ airflow/providers/amazon/aws/hooks/s3.py,sha256=sAuzqwpCkWzVl45Vu6juJsb3-T6mcsskaUlPUwGZxSE,67709
  airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=mq-zL8hQHa6SoXArbHzZ31IFylF-P2vhLW310Ggb9Ts,60418
  airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py,sha256=d3A50iQGOtqvQP9FywkZONr7JU0vMMP2MoqfjoZ_554,7989
  airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
@@ -110,26 +110,26 @@ airflow/providers/amazon/aws/links/sagemaker.py,sha256=RTQubIIpmjTWEGrJiRI2MyF4C
  airflow/providers/amazon/aws/links/sagemaker_unified_studio.py,sha256=pHbO14OmkqqjrjnZpt2tO3LISdBbitd9E00DV3ucfTI,1202
  airflow/providers/amazon/aws/links/step_function.py,sha256=xSL4vfKLnCn-QboRtruajpH5elRrNfw0XkY7eSfPpE4,2099
  airflow/providers/amazon/aws/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=Y1OCcG11rIuNkHnZIgp-XlIL5g2UMobIVZhKn1d5Qtg,11615
+ airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=XEwe-orsE4aPZGGr_aQ_7uDMRElAIBXm7x66d8dFdD0,11175
  airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=R-Qtpc6uZVb_aXpqbKsFx_WKeyEHOiSKxoMfYAVV5GI,9648
  airflow/providers/amazon/aws/notifications/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/amazon/aws/notifications/chime.py,sha256=bpaQtR5IG4ZlBmTdlfrHOZQo4WSQYs_HRm4DWDCkFWE,2127
+ airflow/providers/amazon/aws/notifications/chime.py,sha256=OpFM5fNknzD8mVz_04vOwKd-Ow2ArWM3QdrUAgaSHqs,2101
  airflow/providers/amazon/aws/notifications/sns.py,sha256=XracHC3r3BxzUuv-DzFLy6l7K6R_Ps85oJIUS0-Lkt4,3116
  airflow/providers/amazon/aws/notifications/sqs.py,sha256=iINaYMVw3hpu7EL2PB4BtTx7zsypFaY74C2QrNf7Z-c,3606
  airflow/providers/amazon/aws/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/amazon/aws/operators/appflow.py,sha256=A5SHSt5KO-3vw-fJFgu2cWOpDjQ5oAhKYN0mAwGMWK8,20856
- airflow/providers/amazon/aws/operators/athena.py,sha256=OPNQbBNuIbnfWTEFCeudMBJxYa86fKr3ITqlJNgSXUc,14807
+ airflow/providers/amazon/aws/operators/appflow.py,sha256=TlQSJc1frCw7yKfobjCLf2faWQIT0nKRans5Wy-kJQQ,20824
+ airflow/providers/amazon/aws/operators/athena.py,sha256=CPKPZXN4dGELg0sW0LJIkG0X2-v5qPgWt4TMqYtTS18,14754
  airflow/providers/amazon/aws/operators/base_aws.py,sha256=Y8hbT2uDA2CUfaEhHqm9qc_P81OS1ZnPh_34sHn0FrA,3898
  airflow/providers/amazon/aws/operators/batch.py,sha256=a3fGEw9uQ1RvVLaDVU1obmceiCas2Sm9OOwES2EBE08,21935
  airflow/providers/amazon/aws/operators/bedrock.py,sha256=jDr87UjL1buRgD8LgcQuaRfBh0ufYKpsG6hdXhjy5Ew,45762
  airflow/providers/amazon/aws/operators/cloud_formation.py,sha256=bDzAHcs2QjrnwE3Z9w4s6JFeu5Xl74JWKR8Y5Ez03C4,5005
  airflow/providers/amazon/aws/operators/comprehend.py,sha256=Vkdw0i2iW9_WRQLSDKNncNkVIQaNWG8jz-DxHy47Fmg,17607
- airflow/providers/amazon/aws/operators/datasync.py,sha256=mpq3-JcF7tqEm3BpwnEd3qemvTVVyWyq40DUaHrpne8,20321
+ airflow/providers/amazon/aws/operators/datasync.py,sha256=7DZtLjYxQqk7kV968CFHtWk5pbQPausbvE8DAkvPhqw,20265
  airflow/providers/amazon/aws/operators/dms.py,sha256=XmIcXpkp_--PBQF1m7NFfeHDTp4aRV4lnXUu5h6kEa4,34658
  airflow/providers/amazon/aws/operators/ec2.py,sha256=SclBzOLo3GbQe3kw4S3MKf8zLm8IaKNSiGTc_U-OxRo,19700
- airflow/providers/amazon/aws/operators/ecs.py,sha256=-r1mPp1uUmQ58mSZ6q7S4dw_0Wb_7WRU92FaEpGFbAE,33259
+ airflow/providers/amazon/aws/operators/ecs.py,sha256=xlkTNCNCnNZBW8ntrULLICrpsbZnVBaF6FvazOe-I1A,32793
  airflow/providers/amazon/aws/operators/eks.py,sha256=KXcsb8ZvBsnBiI0Z2Hn_AI4lpCYqEh7T5eFuRDI2t5Y,51372
- airflow/providers/amazon/aws/operators/emr.py,sha256=eMAZjklENP7bNDwWKTKgrrkHLObPSvMDfpukOFCrNTQ,75908
+ airflow/providers/amazon/aws/operators/emr.py,sha256=Cw1qiA0eiPJODCSxHhPayo2_0TZOlA4mj8pcveV0WNc,75983
  airflow/providers/amazon/aws/operators/eventbridge.py,sha256=NacTdvRzZZFizSzC3rb0Z7g8dHQWkKQEXGYzFKOp3fc,10421
  airflow/providers/amazon/aws/operators/glacier.py,sha256=6TFC07B0EOmtRxLs7Bok4jwV84po2yVDa-DnlbnAOVg,3681
  airflow/providers/amazon/aws/operators/glue.py,sha256=2LA7KZp7mhitk9SrcqKBUVnS_NlqSrOwRnCDeuBCuGE,30534
@@ -144,10 +144,11 @@ airflow/providers/amazon/aws/operators/rds.py,sha256=tiRxWVtx2trpeCEzgD7h7_xzseq
  airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=-9JYfjKmj4IHrBm2fhnfg7uxWHj7XMgR5HRnJXr5JXk,39136
  airflow/providers/amazon/aws/operators/redshift_data.py,sha256=motUwcXjxNoboswXx9ooE8fNbNnJ1y9OQyzekYGPNss,10854
  airflow/providers/amazon/aws/operators/s3.py,sha256=Imd3siCtmtaPWRmmSd382dJHhr49WRd-_aP6Tx5T7ac,38389
- airflow/providers/amazon/aws/operators/sagemaker.py,sha256=-CPBe2CBBwZHsY92z1GfWuMi_QdbbybTJh086LR_18Q,91293
+ airflow/providers/amazon/aws/operators/sagemaker.py,sha256=Aj4mgTCXtwWMP1Tms-3bmY10UjBWdWI7RG-LSkbpuwQ,91251
  airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py,sha256=8_Fo8_bhEupkkSjfPPDrpi-bOVkaIUUN13cpugwkgj0,6892
  airflow/providers/amazon/aws/operators/sns.py,sha256=C5QEvwEZ70uuRdGs90MRtIbb3jj9bs-PH5k0ceLZCMI,4623
  airflow/providers/amazon/aws/operators/sqs.py,sha256=o9rH2Pm5DNmccLh5I2wr96hZiuxOPi6YGZ2QluOeVb0,4764
+ airflow/providers/amazon/aws/operators/ssm.py,sha256=4WX38BTzM33iAow1MRnIxJb4MvLAfUYO-BMVf8kFtjU,5716
  airflow/providers/amazon/aws/operators/step_function.py,sha256=isee1oy4X3YnpoJInPI6sYq67E5kIYj9zhhBpHwvDJI,9582
  airflow/providers/amazon/aws/queues/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/amazon/aws/queues/sqs.py,sha256=Ne6co2amDBZ96vABRNcnQ-LgaNC6dtpWBx-tq8bbLV8,1909
@@ -182,6 +183,7 @@ airflow/providers/amazon/aws/sensors/s3.py,sha256=8SiAyTh8_TMvPWrjpPw5gvoyB_omBh
  airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=dVQntJNRyUYCLQ7cIkeHesgZxf-1yS_BBAiVBzCwaHI,13795
  airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py,sha256=ohVNJ_mYTkvoF3d8yAl556uiuFEixLurN_FXcrkvuoQ,2884
  airflow/providers/amazon/aws/sensors/sqs.py,sha256=V3d05xb2VuxdWimpDVJy_SOKX7N0ok9TBbEYO-9o3v4,10672
+ airflow/providers/amazon/aws/sensors/ssm.py,sha256=SIflxP5SyL--8H9sJmnTzYgZAlw9RZdnQN_NglBABso,5406
  airflow/providers/amazon/aws/sensors/step_function.py,sha256=gaklKHdfmE-9avKSmyuGYvv9CuSklpjPz4KXZI8wXnY,3607
  airflow/providers/amazon/aws/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py,sha256=wZXaU7I1tYwDI_2qD-Ctu5oQY8grGpJdd2k8UUlceTI,7273
@@ -205,7 +207,7 @@ airflow/providers/amazon/aws/transfers/s3_to_sftp.py,sha256=sor0_phlRN-yltJiaz0l
  airflow/providers/amazon/aws/transfers/s3_to_sql.py,sha256=RLuAwBHJOMcGaZcDgrivAhLRsOuZsjwTxJEOcLB_1MY,4971
  airflow/providers/amazon/aws/transfers/salesforce_to_s3.py,sha256=noALwo6dpjEHF33ZDDZY0z47HK0Gsv-BU3Zr2NE3zRA,5738
  airflow/providers/amazon/aws/transfers/sftp_to_s3.py,sha256=o5IDLFmeHzqBH6_Uh_fGTk9iymjQYsuGznnH-qZ1M-Y,4234
- airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=EmRN8Hp6GVNbsQWzeUm4tP1RI-9C-pfDZy83ScspHr8,11083
+ airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=-OdWGu-1P9yHsFUqmFuGT2AsmYP7TVy6f0MbmgnB1Ck,11442
  airflow/providers/amazon/aws/triggers/README.md,sha256=ax2F0w2CuQSDN4ghJADozrrv5W4OeCDPA8Vzp00BXOU,10919
  airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/amazon/aws/triggers/athena.py,sha256=62ty40zejcm5Y0d1rTQZuYzSjq3hUkmAs0d_zxM_Kjw,2596
@@ -225,7 +227,7 @@ airflow/providers/amazon/aws/triggers/kinesis_analytics.py,sha256=FERA9pE2o4juRJ
  airflow/providers/amazon/aws/triggers/lambda_function.py,sha256=CWVJHvUSd1v4THrFOA59XW0AjOqfwTR87rT4tUaYaYQ,2847
  airflow/providers/amazon/aws/triggers/mwaa.py,sha256=X6YgeDhQTZW-hGWbazgAZasl6RLZ-pnwU76vSqjpVRg,5407
  airflow/providers/amazon/aws/triggers/neptune.py,sha256=bL9W78zgDp5rASdLGm-_WO6XKne5_tcOMkbGBPmdX-8,5868
- airflow/providers/amazon/aws/triggers/opensearch_serverless.py,sha256=b06MwFZEEiMaFJOseJCrWhpPuTZWXYtkqNZD8iK5hvI,3244
+ airflow/providers/amazon/aws/triggers/opensearch_serverless.py,sha256=iHaYvCsNQ-C-MppZ2bkaQsvh2Ox284Fka5TKux6XoKM,3217
  airflow/providers/amazon/aws/triggers/rds.py,sha256=jT714qEymUpH3zIiwNbCyFFyJarTgCIlfd4v1l3lZFw,7763
  airflow/providers/amazon/aws/triggers/redshift_cluster.py,sha256=iRxNUu1XnacCMn3f5xyar9MuNkqsYoxkNObNR6hondc,10125
  airflow/providers/amazon/aws/triggers/redshift_data.py,sha256=10IIWbta6Zpd2VOe1pVYfv_lB8us1HP9OyapgEiR_pw,4272
@@ -233,6 +235,7 @@ airflow/providers/amazon/aws/triggers/s3.py,sha256=s2_JGXWy7ge6mgypVsdgO80q03p0t
  airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=Dq7LpnaqZkugrySfljz2n6kcMdL0qhwf9qUhPxOOOzk,7946
  airflow/providers/amazon/aws/triggers/sagemaker_unified_studio.py,sha256=1WGrng4rUprRDKSmbyeH-Eda2-8wf7o6VufT1_L7peI,2503
  airflow/providers/amazon/aws/triggers/sqs.py,sha256=NIwBc2ZisY164xfkSYyox3dcuYIEPrn9MhsEFS0eXDA,8590
+ airflow/providers/amazon/aws/triggers/ssm.py,sha256=47brvFXU1DajrccmzD5KuabG_ywo_yzazDP8ng7tlMY,3474
  airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
  airflow/providers/amazon/aws/utils/__init__.py,sha256=-Q5XK8ZV7EK6unj_4hlciqztACPuftMjNKMuBA21q84,3178
  airflow/providers/amazon/aws/utils/connection_wrapper.py,sha256=KJsYG3qnESxxh2PFWvf83gHKzqEEAE9jBANTMoyRn3A,16435
@@ -246,7 +249,7 @@ airflow/providers/amazon/aws/utils/redshift.py,sha256=-Fh3kgrv_3VN6Ys5sEudtRWR9I
  airflow/providers/amazon/aws/utils/sagemaker.py,sha256=893W8DBPhsyPINbFph9MKKP4O_zwptse0oUWm3XtGDE,1040
  airflow/providers/amazon/aws/utils/sagemaker_unified_studio.py,sha256=6ZiMtMzRx4UzdGOYnQcUa9dQJInjD4-G_3CZoMCxuLY,1040
  airflow/providers/amazon/aws/utils/sqs.py,sha256=HNVp0XgsN_L46NMbxlgN3dWvGAWk1Uv_Sl2lT915T_4,3511
- airflow/providers/amazon/aws/utils/suppress.py,sha256=VEJVawKJcfnDkj3xrYyiAMqONHQmtzL0GqWB8Hm9t44,2387
+ airflow/providers/amazon/aws/utils/suppress.py,sha256=SxAZeDpRsaHpa6NBxDywDEIebazfTawYZJtQZPVE3Hw,2353
  airflow/providers/amazon/aws/utils/tags.py,sha256=LDmVOEdPlyWW47bQByQ20UiA4baA34eT65pBd5goOQA,1746
  airflow/providers/amazon/aws/utils/task_log_fetcher.py,sha256=fHaKisKDz4JMGKDotIBCQm43dGKCIKmieYfIV46EfP8,5354
  airflow/providers/amazon/aws/utils/waiter.py,sha256=Vn26dk_UglyhHZeLAQIDJiZmKhIxs--btSa1GRukaKk,4134
@@ -277,7 +280,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
  airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
  airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
  airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
- apache_airflow_providers_amazon-9.10.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
- apache_airflow_providers_amazon-9.10.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- apache_airflow_providers_amazon-9.10.0rc1.dist-info/METADATA,sha256=qYPMrN1BxtFb9h-Kx9szM_omTtbzrlLIxKLn1IF7vgU,9994
- apache_airflow_providers_amazon-9.10.0rc1.dist-info/RECORD,,
+ apache_airflow_providers_amazon-9.11.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+ apache_airflow_providers_amazon-9.11.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ apache_airflow_providers_amazon-9.11.0rc1.dist-info/METADATA,sha256=0pKkjHOqSZ_T4Z3o5JDver1MDZEAPIRgYbcgaOUDxMs,10204
+ apache_airflow_providers_amazon-9.11.0rc1.dist-info/RECORD,,