apache-airflow-providers-amazon 9.14.0__py3-none-any.whl → 9.18.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. airflow/providers/amazon/__init__.py +3 -3
  2. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +106 -5
  3. airflow/providers/amazon/aws/auth_manager/routes/login.py +7 -1
  4. airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py +5 -1
  5. airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py +1 -1
  6. airflow/providers/amazon/aws/hooks/athena.py +6 -2
  7. airflow/providers/amazon/aws/hooks/athena_sql.py +2 -2
  8. airflow/providers/amazon/aws/hooks/base_aws.py +2 -2
  9. airflow/providers/amazon/aws/hooks/batch_client.py +4 -6
  10. airflow/providers/amazon/aws/hooks/batch_waiters.py +0 -1
  11. airflow/providers/amazon/aws/hooks/chime.py +1 -1
  12. airflow/providers/amazon/aws/hooks/datasync.py +3 -3
  13. airflow/providers/amazon/aws/hooks/firehose.py +56 -0
  14. airflow/providers/amazon/aws/hooks/glue.py +7 -1
  15. airflow/providers/amazon/aws/hooks/kinesis.py +31 -13
  16. airflow/providers/amazon/aws/hooks/mwaa.py +38 -7
  17. airflow/providers/amazon/aws/hooks/redshift_sql.py +20 -6
  18. airflow/providers/amazon/aws/hooks/s3.py +41 -11
  19. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +1 -1
  20. airflow/providers/amazon/aws/hooks/ses.py +76 -10
  21. airflow/providers/amazon/aws/hooks/sns.py +74 -18
  22. airflow/providers/amazon/aws/hooks/sqs.py +64 -11
  23. airflow/providers/amazon/aws/hooks/ssm.py +34 -6
  24. airflow/providers/amazon/aws/hooks/step_function.py +1 -1
  25. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  26. airflow/providers/amazon/aws/notifications/ses.py +139 -0
  27. airflow/providers/amazon/aws/notifications/sns.py +16 -1
  28. airflow/providers/amazon/aws/notifications/sqs.py +17 -1
  29. airflow/providers/amazon/aws/operators/base_aws.py +2 -2
  30. airflow/providers/amazon/aws/operators/bedrock.py +2 -0
  31. airflow/providers/amazon/aws/operators/cloud_formation.py +2 -2
  32. airflow/providers/amazon/aws/operators/datasync.py +2 -1
  33. airflow/providers/amazon/aws/operators/emr.py +44 -33
  34. airflow/providers/amazon/aws/operators/mwaa.py +12 -3
  35. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -1
  36. airflow/providers/amazon/aws/operators/ssm.py +122 -17
  37. airflow/providers/amazon/aws/secrets/secrets_manager.py +3 -4
  38. airflow/providers/amazon/aws/sensors/base_aws.py +2 -2
  39. airflow/providers/amazon/aws/sensors/mwaa.py +14 -1
  40. airflow/providers/amazon/aws/sensors/s3.py +27 -13
  41. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -1
  42. airflow/providers/amazon/aws/sensors/ssm.py +33 -17
  43. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +3 -3
  44. airflow/providers/amazon/aws/transfers/base.py +5 -5
  45. airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +4 -4
  46. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
  47. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
  48. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +48 -5
  49. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
  50. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +2 -5
  51. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -1
  52. airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
  53. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
  54. airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
  55. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
  56. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +6 -6
  57. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -1
  58. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
  59. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +6 -6
  60. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
  61. airflow/providers/amazon/aws/transfers/s3_to_sql.py +1 -1
  62. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
  63. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
  64. airflow/providers/amazon/aws/transfers/sql_to_s3.py +4 -5
  65. airflow/providers/amazon/aws/triggers/bedrock.py +1 -1
  66. airflow/providers/amazon/aws/triggers/s3.py +29 -2
  67. airflow/providers/amazon/aws/triggers/ssm.py +17 -1
  68. airflow/providers/amazon/aws/utils/connection_wrapper.py +2 -5
  69. airflow/providers/amazon/aws/utils/mixins.py +1 -1
  70. airflow/providers/amazon/aws/utils/waiter.py +2 -2
  71. airflow/providers/amazon/aws/waiters/emr.json +6 -6
  72. airflow/providers/amazon/get_provider_info.py +19 -1
  73. airflow/providers/amazon/version_compat.py +19 -16
  74. {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/METADATA +25 -19
  75. {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/RECORD +79 -76
  76. apache_airflow_providers_amazon-9.18.0rc2.dist-info/licenses/NOTICE +5 -0
  77. {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/WHEEL +0 -0
  78. {apache_airflow_providers_amazon-9.14.0.dist-info → apache_airflow_providers_amazon-9.18.0rc2.dist-info}/entry_points.txt +0 -0
  79. {airflow/providers/amazon → apache_airflow_providers_amazon-9.18.0rc2.dist-info/licenses}/LICENSE +0 -0

airflow/providers/amazon/aws/hooks/s3.py

@@ -28,7 +28,8 @@ import os
 import re
 import shutil
 import time
-from collections.abc import AsyncIterator, Callable
+import warnings
+from collections.abc import AsyncIterator, Callable, Iterator
 from contextlib import suppress
 from copy import deepcopy
 from datetime import datetime
@@ -42,22 +43,20 @@ from urllib.parse import urlsplit
 from uuid import uuid4

 if TYPE_CHECKING:
+    from aiobotocore.client import AioBaseClient
     from mypy_boto3_s3.service_resource import (
         Bucket as S3Bucket,
         Object as S3ResourceObject,
     )

-    from airflow.utils.types import ArgNotSet
-
-    with suppress(ImportError):
-        from aiobotocore.client import AioBaseClient
+    from airflow.providers.amazon.version_compat import ArgNotSet


 from asgiref.sync import sync_to_async
 from boto3.s3.transfer import S3Transfer, TransferConfig
 from botocore.exceptions import ClientError

-from airflow.exceptions import AirflowException, AirflowNotFoundException
+from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.exceptions import S3HookUriParseFailure
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.utils.tags import format_tags
@@ -931,7 +930,38 @@ class S3Hook(AwsBaseHook):
         max_items: int | None = None,
     ) -> list:
         """
-        List metadata objects in a bucket under prefix.
+        .. deprecated:: <9.13.0> Use `iter_file_metadata` instead.
+
+        This method `get_file_metadata` is deprecated. Calling this method will result in all matching keys
+        being loaded into a single list, and can often result in out-of-memory exceptions.
+        """
+        warnings.warn(
+            "This method `get_file_metadata` is deprecated. Calling this method will result in all matching "
+            "keys being loaded into a single list, and can often result in out-of-memory exceptions. "
+            "Instead, use `iter_file_metadata`.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
+
+        return list(
+            self.iter_file_metadata(
+                prefix=prefix,
+                bucket_name=bucket_name,
+                page_size=page_size,
+                max_items=max_items,
+            )
+        )
+
+    @provide_bucket_name
+    def iter_file_metadata(
+        self,
+        prefix: str,
+        bucket_name: str | None = None,
+        page_size: int | None = None,
+        max_items: int | None = None,
+    ) -> Iterator:
+        """
+        Yield metadata objects from a bucket under a prefix.

         .. seealso::
             - :external+boto3:py:class:`S3.Paginator.ListObjectsV2`
@@ -940,7 +970,7 @@ class S3Hook(AwsBaseHook):
         :param bucket_name: the name of the bucket
         :param page_size: pagination size
         :param max_items: maximum items to return
-        :return: a list of metadata of objects
+        :return: an Iterator of metadata of objects
         """
         config = {
             "PageSize": page_size,
@@ -957,11 +987,9 @@ class S3Hook(AwsBaseHook):
             params["RequestPayer"] = "requester"
         response = paginator.paginate(**params)

-        files = []
         for page in response:
             if "Contents" in page:
-                files += page["Contents"]
-        return files
+                yield from page["Contents"]

     @unify_bucket_name_and_key
     @provide_bucket_name
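
The hunks above deprecate the list-returning get_file_metadata in favour of the generator-backed iter_file_metadata, so callers no longer hold every matching key in memory at once. A minimal usage sketch of the new method; the connection id, bucket, and prefix are placeholder assumptions, only S3Hook and iter_file_metadata come from the diff itself:

    from airflow.providers.amazon.aws.hooks.s3 import S3Hook

    hook = S3Hook(aws_conn_id="aws_default")  # placeholder connection id
    # Pages are streamed lazily; nothing is materialised into one large list.
    for meta in hook.iter_file_metadata(prefix="logs/2024/", bucket_name="example-bucket"):
        print(meta["Key"], meta["Size"])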

@@ -1751,6 +1779,8 @@ class S3Hook(AwsBaseHook):
         local_s3_objects = []
         s3_bucket = self.get_bucket(bucket_name)
         for obj in s3_bucket.objects.filter(Prefix=s3_prefix):
+            if obj.key.endswith("/"):
+                continue
             obj_path = Path(obj.key)
             local_target_path = local_dir.joinpath(obj_path.relative_to(s3_prefix))
             if not local_target_path.parent.exists():

airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py

@@ -26,7 +26,7 @@ from sagemaker_studio.sagemaker_studio_api import SageMakerStudioAPI

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook


 class SageMakerNotebookHook(BaseHook):

airflow/providers/amazon/aws/hooks/ses.py

@@ -42,6 +42,20 @@ class SesHook(AwsBaseHook):
         kwargs["client_type"] = "ses"
         super().__init__(*args, **kwargs)

+    @staticmethod
+    def _build_headers(
+        custom_headers: dict[str, Any] | None,
+        reply_to: str | None,
+        return_path: str | None,
+    ) -> dict[str, Any]:
+        _custom_headers = custom_headers or {}
+        if reply_to:
+            _custom_headers["Reply-To"] = reply_to
+        if return_path:
+            _custom_headers["Return-Path"] = return_path
+
+        return _custom_headers
+
     def send_email(
         self,
         mail_from: str,
@@ -70,23 +84,17 @@
         :param files: List of paths of files to be attached
         :param cc: List of email addresses to set as email's CC
         :param bcc: List of email addresses to set as email's BCC
-        :param mime_subtype: Can be used to specify the sub-type of the message. Default = mixed
+        :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
         :param mime_charset: Email's charset. Default = UTF-8.
         :param return_path: The email address to which replies will be sent. By default, replies
            are sent to the original sender's email address.
         :param reply_to: The email address to which message bounces and complaints should be sent.
            "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
         :param custom_headers: Additional headers to add to the MIME message.
-            No validations are run on these values and they should be able to be encoded.
+            No validations are run on these values, and they should be able to be encoded.
         :return: Response from Amazon SES service with unique message identifier.
         """
-        ses_client = self.get_conn()
-
-        custom_headers = custom_headers or {}
-        if reply_to:
-            custom_headers["Reply-To"] = reply_to
-        if return_path:
-            custom_headers["Return-Path"] = return_path
+        custom_headers = self._build_headers(custom_headers, reply_to, return_path)

         message, recipients = build_mime_message(
             mail_from=mail_from,
@@ -101,6 +109,64 @@
             custom_headers=custom_headers,
         )

-        return ses_client.send_raw_email(
+        return self.conn.send_raw_email(
             Source=mail_from, Destinations=recipients, RawMessage={"Data": message.as_string()}
         )
+
+    async def asend_email(
+        self,
+        mail_from: str,
+        to: str | Iterable[str],
+        subject: str,
+        html_content: str,
+        files: list[str] | None = None,
+        cc: str | Iterable[str] | None = None,
+        bcc: str | Iterable[str] | None = None,
+        mime_subtype: str = "mixed",
+        mime_charset: str = "utf-8",
+        reply_to: str | None = None,
+        return_path: str | None = None,
+        custom_headers: dict[str, Any] | None = None,
+    ) -> dict:
+        """
+        Send email using Amazon Simple Email Service (async).
+
+        .. seealso::
+            - :external+boto3:py:meth:`SES.Client.send_raw_email`
+
+        :param mail_from: Email address to set as email's from
+        :param to: List of email addresses to set as email's to
+        :param subject: Email's subject
+        :param html_content: Content of email in HTML format
+        :param files: List of paths of files to be attached
+        :param cc: List of email addresses to set as email's CC
+        :param bcc: List of email addresses to set as email's BCC
+        :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
+        :param mime_charset: Email's charset. Default = UTF-8.
+        :param return_path: The email address to which replies will be sent. By default, replies
+            are sent to the original sender's email address.
+        :param reply_to: The email address to which message bounces and complaints should be sent.
+            "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
+        :param custom_headers: Additional headers to add to the MIME message.
+            No validations are run on these values, and they should be able to be encoded.
+        :return: Response from Amazon SES service with unique message identifier.
+        """
+        custom_headers = self._build_headers(custom_headers, reply_to, return_path)
+
+        message, recipients = build_mime_message(
+            mail_from=mail_from,
+            to=to,
+            subject=subject,
+            html_content=html_content,
+            files=files,
+            cc=cc,
+            bcc=bcc,
+            mime_subtype=mime_subtype,
+            mime_charset=mime_charset,
+            custom_headers=custom_headers,
+        )
+
+        async with await self.get_async_conn() as async_client:
+            return await async_client.send_raw_email(
+                Source=mail_from, Destinations=recipients, RawMessage={"Data": message.as_string()}
+            )
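
The new asend_email reuses the shared _build_headers and build_mime_message path and only swaps the final call onto the async client from get_async_conn. A hedged sketch of calling it outside a running event loop; the addresses and connection id are placeholders:

    import asyncio

    from airflow.providers.amazon.aws.hooks.ses import SesHook

    async def main():
        hook = SesHook(aws_conn_id="aws_default")  # placeholder connection id
        # The MIME message is built synchronously; only the send itself is awaited.
        response = await hook.asend_email(
            mail_from="noreply@example.com",
            to="team@example.com",
            subject="Nightly report",
            html_content="<p>All green.</p>",
        )
        print(response["MessageId"])

    asyncio.run(main())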

airflow/providers/amazon/aws/hooks/sns.py

@@ -22,6 +22,7 @@ from __future__ import annotations
 import json

 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.utils.helpers import prune_dict


 def _get_message_attribute(o):
@@ -38,6 +39,33 @@ def _get_message_attribute(o):
     )


+def _build_publish_kwargs(
+    target_arn: str,
+    message: str,
+    subject: str | None = None,
+    message_attributes: dict | None = None,
+    message_deduplication_id: str | None = None,
+    message_group_id: str | None = None,
+) -> dict[str, str | dict]:
+    publish_kwargs: dict[str, str | dict] = prune_dict(
+        {
+            "TargetArn": target_arn,
+            "MessageStructure": "json",
+            "Message": json.dumps({"default": message}),
+            "Subject": subject,
+            "MessageDeduplicationId": message_deduplication_id,
+            "MessageGroupId": message_group_id,
+        }
+    )
+
+    if message_attributes:
+        publish_kwargs["MessageAttributes"] = {
+            key: _get_message_attribute(val) for key, val in message_attributes.items()
+        }
+
+    return publish_kwargs
+
+
 class SnsHook(AwsBaseHook):
     """
     Interact with Amazon Simple Notification Service.
@@ -84,22 +112,50 @@ class SnsHook(AwsBaseHook):
         :param message_group_id: Tag that specifies that a message belongs to a specific message group.
            This parameter applies only to FIFO (first-in-first-out) topics.
         """
-        publish_kwargs: dict[str, str | dict] = {
-            "TargetArn": target_arn,
-            "MessageStructure": "json",
-            "Message": json.dumps({"default": message}),
-        }
+        return self.get_conn().publish(
+            **_build_publish_kwargs(
+                target_arn, message, subject, message_attributes, message_deduplication_id, message_group_id
+            )
+        )

-        # Construct args this way because boto3 distinguishes from missing args and those set to None
-        if subject:
-            publish_kwargs["Subject"] = subject
-        if message_deduplication_id:
-            publish_kwargs["MessageDeduplicationId"] = message_deduplication_id
-        if message_group_id:
-            publish_kwargs["MessageGroupId"] = message_group_id
-        if message_attributes:
-            publish_kwargs["MessageAttributes"] = {
-                key: _get_message_attribute(val) for key, val in message_attributes.items()
-            }
-
-        return self.get_conn().publish(**publish_kwargs)
+    async def apublish_to_target(
+        self,
+        target_arn: str,
+        message: str,
+        subject: str | None = None,
+        message_attributes: dict | None = None,
+        message_deduplication_id: str | None = None,
+        message_group_id: str | None = None,
+    ):
+        """
+        Publish a message to a SNS topic or an endpoint.
+
+        .. seealso::
+            - :external+boto3:py:meth:`SNS.Client.publish`
+
+        :param target_arn: either a TopicArn or an EndpointArn
+        :param message: the default message you want to send
+        :param subject: subject of message
+        :param message_attributes: additional attributes to publish for message filtering. This should be
+            a flat dict; the DataType to be sent depends on the type of the value:
+
+            - bytes = Binary
+            - str = String
+            - int, float = Number
+            - iterable = String.Array
+        :param message_deduplication_id: Every message must have a unique message_deduplication_id.
+            This parameter applies only to FIFO (first-in-first-out) topics.
+        :param message_group_id: Tag that specifies that a message belongs to a specific message group.
+            This parameter applies only to FIFO (first-in-first-out) topics.
+        """
+        async with await self.get_async_conn() as async_client:
+            return await async_client.publish(
+                **_build_publish_kwargs(
+                    target_arn,
+                    message,
+                    subject,
+                    message_attributes,
+                    message_deduplication_id,
+                    message_group_id,
+                )
+            )
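
apublish_to_target mirrors publish_to_target through the shared _build_publish_kwargs helper, so prune_dict drops any FIFO-only arguments that were left unset. A sketch under assumed values; the topic ARN and connection id are placeholders:

    import asyncio

    from airflow.providers.amazon.aws.hooks.sns import SnsHook

    async def main():
        hook = SnsHook(aws_conn_id="aws_default")  # placeholder connection id
        await hook.apublish_to_target(
            target_arn="arn:aws:sns:us-east-1:123456789012:example-topic",  # placeholder ARN
            message="Deployment finished",
            subject="CI status",
        )

    asyncio.run(main())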

airflow/providers/amazon/aws/hooks/sqs.py

@@ -20,6 +20,7 @@
 from __future__ import annotations

 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.utils.helpers import prune_dict


 class SqsHook(AwsBaseHook):
@@ -52,6 +53,26 @@ class SqsHook(AwsBaseHook):
         """
         return self.get_conn().create_queue(QueueName=queue_name, Attributes=attributes or {})

+    @staticmethod
+    def _build_msg_params(
+        queue_url: str,
+        message_body: str,
+        delay_seconds: int = 0,
+        message_attributes: dict | None = None,
+        message_group_id: str | None = None,
+        message_deduplication_id: str | None = None,
+    ) -> dict:
+        return prune_dict(
+            {
+                "QueueUrl": queue_url,
+                "MessageBody": message_body,
+                "DelaySeconds": delay_seconds,
+                "MessageAttributes": message_attributes or {},
+                "MessageGroupId": message_group_id,
+                "MessageDeduplicationId": message_deduplication_id,
+            }
+        )
+
     def send_message(
         self,
         queue_url: str,
@@ -75,15 +96,47 @@ class SqsHook(AwsBaseHook):
         :param message_deduplication_id: This applies only to FIFO (first-in-first-out) queues.
         :return: dict with the information about the message sent
         """
-        params = {
-            "QueueUrl": queue_url,
-            "MessageBody": message_body,
-            "DelaySeconds": delay_seconds,
-            "MessageAttributes": message_attributes or {},
-        }
-        if message_group_id:
-            params["MessageGroupId"] = message_group_id
-        if message_deduplication_id:
-            params["MessageDeduplicationId"] = message_deduplication_id
-
+        params = self._build_msg_params(
+            queue_url=queue_url,
+            message_body=message_body,
+            delay_seconds=delay_seconds,
+            message_attributes=message_attributes,
+            message_group_id=message_group_id,
+            message_deduplication_id=message_deduplication_id,
+        )
         return self.get_conn().send_message(**params)
+
+    async def asend_message(
+        self,
+        queue_url: str,
+        message_body: str,
+        delay_seconds: int = 0,
+        message_attributes: dict | None = None,
+        message_group_id: str | None = None,
+        message_deduplication_id: str | None = None,
+    ) -> dict:
+        """
+        Send message to the queue (async).
+
+        .. seealso::
+            - :external+boto3:py:meth:`SQS.Client.send_message`
+
+        :param queue_url: queue url
+        :param message_body: the contents of the message
+        :param delay_seconds: seconds to delay the message
+        :param message_attributes: additional attributes for the message (default: None)
+        :param message_group_id: This applies only to FIFO (first-in-first-out) queues. (default: None)
+        :param message_deduplication_id: This applies only to FIFO (first-in-first-out) queues.
+        :return: dict with the information about the message sent
+        """
+        params = self._build_msg_params(
+            queue_url=queue_url,
+            message_body=message_body,
+            delay_seconds=delay_seconds,
+            message_attributes=message_attributes,
+            message_group_id=message_group_id,
+            message_deduplication_id=message_deduplication_id,
+        )
+
+        async with await self.get_async_conn() as async_conn:
+            return await async_conn.send_message(**params)
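
Both send_message and the new asend_message now build their arguments through _build_msg_params, where prune_dict removes the unset MessageGroupId and MessageDeduplicationId keys before boto3 sees them. A short sketch of the async variant; the queue URL and connection id are placeholders:

    import asyncio

    from airflow.providers.amazon.aws.hooks.sqs import SqsHook

    async def main():
        hook = SqsHook(aws_conn_id="aws_default")  # placeholder connection id
        result = await hook.asend_message(
            queue_url="https://sqs.us-east-1.amazonaws.com/123456789012/example-queue",  # placeholder URL
            message_body='{"event": "task_done"}',
        )
        print(result["MessageId"])

    asyncio.run(main())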

airflow/providers/amazon/aws/hooks/ssm.py

@@ -20,7 +20,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from airflow.utils.types import NOTSET, ArgNotSet
+from airflow.providers.amazon.version_compat import NOTSET, ArgNotSet, is_arg_set

 if TYPE_CHECKING:
     from airflow.sdk.execution_time.secrets_masker import mask_secret
@@ -38,7 +38,8 @@ class SsmHook(AwsBaseHook):
     """
     Interact with Amazon Systems Manager (SSM).

-    Provide thin wrapper around :external+boto3:py:class:`boto3.client("ssm") <SSM.Client>`.
+    Provide thin wrapper around
+    :external+boto3:py:class:`boto3.client("ssm") <SSM.Client>`.

     Additional arguments (such as ``aws_conn_id``) may be specified and
     are passed down to the underlying AwsBaseHook.
@@ -53,7 +54,9 @@

     def get_parameter_value(self, parameter: str, default: str | ArgNotSet = NOTSET) -> str:
         """
-        Return the provided Parameter or an optional default; if it is encrypted, then decrypt and mask.
+        Return the provided Parameter or an optional default.
+
+        If it is encrypted, then decrypt and mask.

         .. seealso::
             - :external+boto3:py:meth:`SSM.Client.get_parameter`
@@ -68,6 +71,31 @@
                 mask_secret(value)
             return value
         except self.conn.exceptions.ParameterNotFound:
-            if isinstance(default, ArgNotSet):
-                raise
-            return default
+            if is_arg_set(default):
+                return default
+            raise
+
+    def get_command_invocation(self, command_id: str, instance_id: str) -> dict:
+        """
+        Get the output of a command invocation for a specific instance.
+
+        .. seealso::
+            - :external+boto3:py:meth:`SSM.Client.get_command_invocation`
+
+        :param command_id: The ID of the command.
+        :param instance_id: The ID of the instance.
+        :return: The command invocation details including output.
+        """
+        return self.conn.get_command_invocation(CommandId=command_id, InstanceId=instance_id)
+
+    def list_command_invocations(self, command_id: str) -> dict:
+        """
+        List all command invocations for a given command ID.
+
+        .. seealso::
+            - :external+boto3:py:meth:`SSM.Client.list_command_invocations`
+
+        :param command_id: The ID of the command.
+        :return: Response from SSM list_command_invocations API.
+        """
+        return self.conn.list_command_invocations(CommandId=command_id)
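
The two new SsmHook helpers are thin pass-throughs to the corresponding boto3 SSM calls. A usage sketch with placeholder ids and connection; only the hook methods themselves come from the diff:

    from airflow.providers.amazon.aws.hooks.ssm import SsmHook

    hook = SsmHook(aws_conn_id="aws_default")  # placeholder connection id
    command_id = "11111111-2222-3333-4444-555555555555"  # placeholder command id

    # Enumerate per-instance invocations, then fetch detailed output for each.
    for invocation in hook.list_command_invocations(command_id)["CommandInvocations"]:
        details = hook.get_command_invocation(command_id, invocation["InstanceId"])
        print(invocation["InstanceId"], details["Status"])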

airflow/providers/amazon/aws/hooks/step_function.py

@@ -18,8 +18,8 @@ from __future__ import annotations

 import json

-from airflow.exceptions import AirflowFailException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.common.compat.sdk import AirflowFailException


 class StepFunctionHook(AwsBaseHook):

airflow/providers/amazon/aws/links/base_aws.py

@@ -20,7 +20,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, ClassVar

 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.amazon.version_compat import BaseOperatorLink, XCom
+from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom

 if TYPE_CHECKING:
     from airflow.models import BaseOperator

airflow/providers/amazon/aws/notifications/ses.py (new file)

@@ -0,0 +1,139 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from collections.abc import Iterable, Sequence
+from functools import cached_property
+from typing import Any
+
+from airflow.providers.amazon.aws.hooks.ses import SesHook
+from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_PLUS
+from airflow.providers.common.compat.notifier import BaseNotifier
+from airflow.utils.helpers import prune_dict
+
+
+class SesNotifier(BaseNotifier):
+    """
+    Amazon Simple Email Service (SES) Notifier.
+
+    :param mail_from: Email address to set as email's from
+    :param to: List of email addresses to set as email's to
+    :param subject: Email's subject
+    :param html_content: Content of email in HTML format
+    :param files: List of paths of files to be attached
+    :param cc: List of email addresses to set as email's CC
+    :param bcc: List of email addresses to set as email's BCC
+    :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
+    :param mime_charset: Email's charset. Default = UTF-8.
+    :param return_path: The email address to which replies will be sent. By default, replies
+        are sent to the original sender's email address.
+    :param reply_to: The email address to which message bounces and complaints should be sent.
+        "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
+    :param custom_headers: Additional headers to add to the MIME message.
+        No validations are run on these values, and they should be able to be encoded.
+    """
+
+    template_fields: Sequence[str] = (
+        "aws_conn_id",
+        "region_name",
+        "mail_from",
+        "to",
+        "subject",
+        "html_content",
+        "files",
+        "cc",
+        "bcc",
+        "mime_subtype",
+        "mime_charset",
+        "reply_to",
+        "return_path",
+        "custom_headers",
+    )
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str | None = SesHook.default_conn_name,
+        region_name: str | None = None,
+        mail_from: str,
+        to: str | Iterable[str],
+        subject: str,
+        html_content: str,
+        files: list[str] | None = None,
+        cc: str | Iterable[str] | None = None,
+        bcc: str | Iterable[str] | None = None,
+        mime_subtype: str = "mixed",
+        mime_charset: str = "utf-8",
+        reply_to: str | None = None,
+        return_path: str | None = None,
+        custom_headers: dict[str, Any] | None = None,
+        **kwargs,
+    ):
+        if AIRFLOW_V_3_1_PLUS:
+            # Support for passing context was added in 3.1.0
+            super().__init__(**kwargs)
+        else:
+            super().__init__()
+        self.aws_conn_id = aws_conn_id
+        self.region_name = region_name
+
+        self.mail_from = mail_from
+        self.to = to
+        self.subject = subject
+        self.html_content = html_content
+        self.files = files
+        self.cc = cc
+        self.bcc = bcc
+        self.mime_subtype = mime_subtype
+        self.mime_charset = mime_charset
+        self.reply_to = reply_to
+        self.return_path = return_path
+        self.custom_headers = custom_headers
+
+    def _build_send_kwargs(self):
+        return prune_dict(
+            {
+                "mail_from": self.mail_from,
+                "to": self.to,
+                "subject": self.subject,
+                "html_content": self.html_content,
+                "files": self.files,
+                "cc": self.cc,
+                "bcc": self.bcc,
+                "mime_subtype": self.mime_subtype,
+                "mime_charset": self.mime_charset,
+                "reply_to": self.reply_to,
+                "return_path": self.return_path,
+                "custom_headers": self.custom_headers,
+            }
+        )
+
+    @cached_property
+    def hook(self) -> SesHook:
+        """Amazon Simple Email Service (SES) Hook (cached)."""
+        return SesHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+    def notify(self, context):
+        """Send email using Amazon Simple Email Service (SES)."""
+        self.hook.send_email(**self._build_send_kwargs())
+
+    async def async_notify(self, context):
+        """Send email using Amazon Simple Email Service (SES) (async)."""
+        await self.hook.asend_email(**self._build_send_kwargs())
+
+
+send_ses_notification = SesNotifier
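
The new SesNotifier (exported as send_ses_notification) wires SesHook.send_email and asend_email into the common notifier interface, with prune_dict stripping unset optional fields before the hook call. A hedged sketch of attaching it as a failure callback; the addresses and connection id are placeholders:

    from airflow.providers.amazon.aws.notifications.ses import send_ses_notification

    notifier = send_ses_notification(
        aws_conn_id="aws_default",  # placeholder connection id
        mail_from="alerts@example.com",
        to="oncall@example.com",
        subject="DAG {{ dag.dag_id }} failed",
        html_content="<p>Run {{ run_id }} failed.</p>",
    )

    # For example: DAG(..., on_failure_callback=notifier), or at the task level.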