apache-airflow-providers-amazon 9.14.0rc1__py3-none-any.whl → 9.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "9.14.0"
32
+ __version__ = "9.15.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -338,6 +338,37 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
338
338
  ]
339
339
  return self.avp_facade.batch_is_authorized(requests=facade_requests, user=user)
340
340
 
341
+ def filter_authorized_connections(
342
+ self,
343
+ *,
344
+ conn_ids: set[str],
345
+ user: AwsAuthManagerUser,
346
+ method: ResourceMethod = "GET",
347
+ team_name: str | None = None,
348
+ ) -> set[str]:
349
+ requests: dict[str, dict[ResourceMethod, IsAuthorizedRequest]] = defaultdict(dict)
350
+ requests_list: list[IsAuthorizedRequest] = []
351
+ for conn_id in conn_ids:
352
+ request: IsAuthorizedRequest = {
353
+ "method": method,
354
+ "entity_type": AvpEntities.CONNECTION,
355
+ "entity_id": conn_id,
356
+ }
357
+ requests[conn_id][method] = request
358
+ requests_list.append(request)
359
+
360
+ batch_is_authorized_results = self.avp_facade.get_batch_is_authorized_results(
361
+ requests=requests_list, user=user
362
+ )
363
+
364
+ return {
365
+ conn_id
366
+ for conn_id in conn_ids
367
+ if self._is_authorized_from_batch_response(
368
+ batch_is_authorized_results, requests[conn_id][method], user
369
+ )
370
+ }
371
+
341
372
  def filter_authorized_dag_ids(
342
373
  self,
343
374
  *,
@@ -361,13 +392,75 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
361
392
  requests=requests_list, user=user
362
393
  )
363
394
 
364
- def _has_access_to_dag(request: IsAuthorizedRequest):
365
- result = self.avp_facade.get_batch_is_authorized_single_result(
366
- batch_is_authorized_results=batch_is_authorized_results, request=request, user=user
395
+ return {
396
+ dag_id
397
+ for dag_id in dag_ids
398
+ if self._is_authorized_from_batch_response(
399
+ batch_is_authorized_results, requests[dag_id][method], user
367
400
  )
368
- return result["decision"] == "ALLOW"
401
+ }
369
402
 
370
- return {dag_id for dag_id in dag_ids if _has_access_to_dag(requests[dag_id][method])}
403
+ def filter_authorized_pools(
404
+ self,
405
+ *,
406
+ pool_names: set[str],
407
+ user: AwsAuthManagerUser,
408
+ method: ResourceMethod = "GET",
409
+ team_name: str | None = None,
410
+ ) -> set[str]:
411
+ requests: dict[str, dict[ResourceMethod, IsAuthorizedRequest]] = defaultdict(dict)
412
+ requests_list: list[IsAuthorizedRequest] = []
413
+ for pool_name in pool_names:
414
+ request: IsAuthorizedRequest = {
415
+ "method": method,
416
+ "entity_type": AvpEntities.POOL,
417
+ "entity_id": pool_name,
418
+ }
419
+ requests[pool_name][method] = request
420
+ requests_list.append(request)
421
+
422
+ batch_is_authorized_results = self.avp_facade.get_batch_is_authorized_results(
423
+ requests=requests_list, user=user
424
+ )
425
+
426
+ return {
427
+ pool_name
428
+ for pool_name in pool_names
429
+ if self._is_authorized_from_batch_response(
430
+ batch_is_authorized_results, requests[pool_name][method], user
431
+ )
432
+ }
433
+
434
+ def filter_authorized_variables(
435
+ self,
436
+ *,
437
+ variable_keys: set[str],
438
+ user: AwsAuthManagerUser,
439
+ method: ResourceMethod = "GET",
440
+ team_name: str | None = None,
441
+ ) -> set[str]:
442
+ requests: dict[str, dict[ResourceMethod, IsAuthorizedRequest]] = defaultdict(dict)
443
+ requests_list: list[IsAuthorizedRequest] = []
444
+ for variable_key in variable_keys:
445
+ request: IsAuthorizedRequest = {
446
+ "method": method,
447
+ "entity_type": AvpEntities.VARIABLE,
448
+ "entity_id": variable_key,
449
+ }
450
+ requests[variable_key][method] = request
451
+ requests_list.append(request)
452
+
453
+ batch_is_authorized_results = self.avp_facade.get_batch_is_authorized_results(
454
+ requests=requests_list, user=user
455
+ )
456
+
457
+ return {
458
+ variable_key
459
+ for variable_key in variable_keys
460
+ if self._is_authorized_from_batch_response(
461
+ batch_is_authorized_results, requests[variable_key][method], user
462
+ )
463
+ }
371
464
 
372
465
  def get_url_login(self, **kwargs) -> str:
373
466
  return urljoin(self.apiserver_endpoint, f"{AUTH_MANAGER_FASTAPI_APP_PREFIX}/login")
@@ -406,6 +499,14 @@ class AwsAuthManager(BaseAuthManager[AwsAuthManagerUser]):
406
499
  "entity_id": menu_item_text,
407
500
  }
408
501
 
502
+ def _is_authorized_from_batch_response(
503
+ self, batch_is_authorized_results: list[dict], request: IsAuthorizedRequest, user: AwsAuthManagerUser
504
+ ):
505
+ result = self.avp_facade.get_batch_is_authorized_single_result(
506
+ batch_is_authorized_results=batch_is_authorized_results, request=request, user=user
507
+ )
508
+ return result["decision"] == "ALLOW"
509
+
409
510
  def _check_avp_schema_version(self):
410
511
  if not self.avp_facade.is_policy_store_schema_up_to_date():
411
512
  self.log.warning(
@@ -49,7 +49,7 @@ try:
49
49
  except ImportError:
50
50
  from airflow.utils import timezone # type: ignore[attr-defined,no-redef]
51
51
 
52
- from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
52
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
53
53
 
54
54
  if TYPE_CHECKING:
55
55
  from sqlalchemy.orm import Session
@@ -28,7 +28,8 @@ import os
28
28
  import re
29
29
  import shutil
30
30
  import time
31
- from collections.abc import AsyncIterator, Callable
31
+ import warnings
32
+ from collections.abc import AsyncIterator, Callable, Iterator
32
33
  from contextlib import suppress
33
34
  from copy import deepcopy
34
35
  from datetime import datetime
@@ -57,7 +58,7 @@ from asgiref.sync import sync_to_async
57
58
  from boto3.s3.transfer import S3Transfer, TransferConfig
58
59
  from botocore.exceptions import ClientError
59
60
 
60
- from airflow.exceptions import AirflowException, AirflowNotFoundException
61
+ from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowProviderDeprecationWarning
61
62
  from airflow.providers.amazon.aws.exceptions import S3HookUriParseFailure
62
63
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
63
64
  from airflow.providers.amazon.aws.utils.tags import format_tags
@@ -931,7 +932,31 @@ class S3Hook(AwsBaseHook):
931
932
  max_items: int | None = None,
932
933
  ) -> list:
933
934
  """
934
- List metadata objects in a bucket under prefix.
935
+ .. deprecated:: <9.13.0> Use `iter_file_metadata` instead.
936
+
937
+ This method `get_file_metadata` is deprecated. Calling this method will result in all matching keys
938
+ being loaded into a single list, and can often result in out-of-memory exceptions.
939
+ """
940
+ warnings.warn(
941
+ "This method `get_file_metadata` is deprecated. Calling this method will result in all matching "
942
+ "keys being loaded into a single list, and can often result in out-of-memory exceptions. "
943
+ "Instead, use `iter_file_metadata`.",
944
+ AirflowProviderDeprecationWarning,
945
+ stacklevel=2,
946
+ )
947
+
948
+ return list(self.iter_file_metadata(prefix=prefix, page_size=page_size, max_items=max_items))
949
+
950
+ @provide_bucket_name
951
+ def iter_file_metadata(
952
+ self,
953
+ prefix: str,
954
+ bucket_name: str | None = None,
955
+ page_size: int | None = None,
956
+ max_items: int | None = None,
957
+ ) -> Iterator:
958
+ """
959
+ Yield metadata objects from a bucket under a prefix.
935
960
 
936
961
  .. seealso::
937
962
  - :external+boto3:py:class:`S3.Paginator.ListObjectsV2`
@@ -940,7 +965,7 @@ class S3Hook(AwsBaseHook):
940
965
  :param bucket_name: the name of the bucket
941
966
  :param page_size: pagination size
942
967
  :param max_items: maximum items to return
943
- :return: a list of metadata of objects
968
+ :return: an Iterator of metadata of objects
944
969
  """
945
970
  config = {
946
971
  "PageSize": page_size,
@@ -957,11 +982,9 @@ class S3Hook(AwsBaseHook):
957
982
  params["RequestPayer"] = "requester"
958
983
  response = paginator.paginate(**params)
959
984
 
960
- files = []
961
985
  for page in response:
962
986
  if "Contents" in page:
963
- files += page["Contents"]
964
- return files
987
+ yield from page["Contents"]
965
988
 
966
989
  @unify_bucket_name_and_key
967
990
  @provide_bucket_name
@@ -42,6 +42,20 @@ class SesHook(AwsBaseHook):
42
42
  kwargs["client_type"] = "ses"
43
43
  super().__init__(*args, **kwargs)
44
44
 
45
+ @staticmethod
46
+ def _build_headers(
47
+ custom_headers: dict[str, Any] | None,
48
+ reply_to: str | None,
49
+ return_path: str | None,
50
+ ) -> dict[str, Any]:
51
+ _custom_headers = custom_headers or {}
52
+ if reply_to:
53
+ _custom_headers["Reply-To"] = reply_to
54
+ if return_path:
55
+ _custom_headers["Return-Path"] = return_path
56
+
57
+ return _custom_headers
58
+
45
59
  def send_email(
46
60
  self,
47
61
  mail_from: str,
@@ -70,23 +84,17 @@ class SesHook(AwsBaseHook):
70
84
  :param files: List of paths of files to be attached
71
85
  :param cc: List of email addresses to set as email's CC
72
86
  :param bcc: List of email addresses to set as email's BCC
73
- :param mime_subtype: Can be used to specify the sub-type of the message. Default = mixed
87
+ :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
74
88
  :param mime_charset: Email's charset. Default = UTF-8.
75
89
  :param return_path: The email address to which replies will be sent. By default, replies
76
90
  are sent to the original sender's email address.
77
91
  :param reply_to: The email address to which message bounces and complaints should be sent.
78
92
  "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
79
93
  :param custom_headers: Additional headers to add to the MIME message.
80
- No validations are run on these values and they should be able to be encoded.
94
+ No validations are run on these values, and they should be able to be encoded.
81
95
  :return: Response from Amazon SES service with unique message identifier.
82
96
  """
83
- ses_client = self.get_conn()
84
-
85
- custom_headers = custom_headers or {}
86
- if reply_to:
87
- custom_headers["Reply-To"] = reply_to
88
- if return_path:
89
- custom_headers["Return-Path"] = return_path
97
+ custom_headers = self._build_headers(custom_headers, reply_to, return_path)
90
98
 
91
99
  message, recipients = build_mime_message(
92
100
  mail_from=mail_from,
@@ -101,6 +109,64 @@ class SesHook(AwsBaseHook):
101
109
  custom_headers=custom_headers,
102
110
  )
103
111
 
104
- return ses_client.send_raw_email(
112
+ return self.conn.send_raw_email(
105
113
  Source=mail_from, Destinations=recipients, RawMessage={"Data": message.as_string()}
106
114
  )
115
+
116
+ async def asend_email(
117
+ self,
118
+ mail_from: str,
119
+ to: str | Iterable[str],
120
+ subject: str,
121
+ html_content: str,
122
+ files: list[str] | None = None,
123
+ cc: str | Iterable[str] | None = None,
124
+ bcc: str | Iterable[str] | None = None,
125
+ mime_subtype: str = "mixed",
126
+ mime_charset: str = "utf-8",
127
+ reply_to: str | None = None,
128
+ return_path: str | None = None,
129
+ custom_headers: dict[str, Any] | None = None,
130
+ ) -> dict:
131
+ """
132
+ Send email using Amazon Simple Email Service (async).
133
+
134
+ .. seealso::
135
+ - :external+boto3:py:meth:`SES.Client.send_raw_email`
136
+
137
+ :param mail_from: Email address to set as email's from
138
+ :param to: List of email addresses to set as email's to
139
+ :param subject: Email's subject
140
+ :param html_content: Content of email in HTML format
141
+ :param files: List of paths of files to be attached
142
+ :param cc: List of email addresses to set as email's CC
143
+ :param bcc: List of email addresses to set as email's BCC
144
+ :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
145
+ :param mime_charset: Email's charset. Default = UTF-8.
146
+ :param return_path: The email address to which replies will be sent. By default, replies
147
+ are sent to the original sender's email address.
148
+ :param reply_to: The email address to which message bounces and complaints should be sent.
149
+ "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
150
+ :param custom_headers: Additional headers to add to the MIME message.
151
+ No validations are run on these values, and they should be able to be encoded.
152
+ :return: Response from Amazon SES service with unique message identifier.
153
+ """
154
+ custom_headers = self._build_headers(custom_headers, reply_to, return_path)
155
+
156
+ message, recipients = build_mime_message(
157
+ mail_from=mail_from,
158
+ to=to,
159
+ subject=subject,
160
+ html_content=html_content,
161
+ files=files,
162
+ cc=cc,
163
+ bcc=bcc,
164
+ mime_subtype=mime_subtype,
165
+ mime_charset=mime_charset,
166
+ custom_headers=custom_headers,
167
+ )
168
+
169
+ async with await self.get_async_conn() as async_client:
170
+ return await async_client.send_raw_email(
171
+ Source=mail_from, Destinations=recipients, RawMessage={"Data": message.as_string()}
172
+ )
@@ -22,6 +22,7 @@ from __future__ import annotations
22
22
  import json
23
23
 
24
24
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
25
+ from airflow.utils.helpers import prune_dict
25
26
 
26
27
 
27
28
  def _get_message_attribute(o):
@@ -38,6 +39,33 @@ def _get_message_attribute(o):
38
39
  )
39
40
 
40
41
 
42
+ def _build_publish_kwargs(
43
+ target_arn: str,
44
+ message: str,
45
+ subject: str | None = None,
46
+ message_attributes: dict | None = None,
47
+ message_deduplication_id: str | None = None,
48
+ message_group_id: str | None = None,
49
+ ) -> dict[str, str | dict]:
50
+ publish_kwargs: dict[str, str | dict] = prune_dict(
51
+ {
52
+ "TargetArn": target_arn,
53
+ "MessageStructure": "json",
54
+ "Message": json.dumps({"default": message}),
55
+ "Subject": subject,
56
+ "MessageDeduplicationId": message_deduplication_id,
57
+ "MessageGroupId": message_group_id,
58
+ }
59
+ )
60
+
61
+ if message_attributes:
62
+ publish_kwargs["MessageAttributes"] = {
63
+ key: _get_message_attribute(val) for key, val in message_attributes.items()
64
+ }
65
+
66
+ return publish_kwargs
67
+
68
+
41
69
  class SnsHook(AwsBaseHook):
42
70
  """
43
71
  Interact with Amazon Simple Notification Service.
@@ -84,22 +112,50 @@ class SnsHook(AwsBaseHook):
84
112
  :param message_group_id: Tag that specifies that a message belongs to a specific message group.
85
113
  This parameter applies only to FIFO (first-in-first-out) topics.
86
114
  """
87
- publish_kwargs: dict[str, str | dict] = {
88
- "TargetArn": target_arn,
89
- "MessageStructure": "json",
90
- "Message": json.dumps({"default": message}),
91
- }
115
+ return self.get_conn().publish(
116
+ **_build_publish_kwargs(
117
+ target_arn, message, subject, message_attributes, message_deduplication_id, message_group_id
118
+ )
119
+ )
92
120
 
93
- # Construct args this way because boto3 distinguishes from missing args and those set to None
94
- if subject:
95
- publish_kwargs["Subject"] = subject
96
- if message_deduplication_id:
97
- publish_kwargs["MessageDeduplicationId"] = message_deduplication_id
98
- if message_group_id:
99
- publish_kwargs["MessageGroupId"] = message_group_id
100
- if message_attributes:
101
- publish_kwargs["MessageAttributes"] = {
102
- key: _get_message_attribute(val) for key, val in message_attributes.items()
103
- }
104
-
105
- return self.get_conn().publish(**publish_kwargs)
121
+ async def apublish_to_target(
122
+ self,
123
+ target_arn: str,
124
+ message: str,
125
+ subject: str | None = None,
126
+ message_attributes: dict | None = None,
127
+ message_deduplication_id: str | None = None,
128
+ message_group_id: str | None = None,
129
+ ):
130
+ """
131
+ Publish a message to a SNS topic or an endpoint.
132
+
133
+ .. seealso::
134
+ - :external+boto3:py:meth:`SNS.Client.publish`
135
+
136
+ :param target_arn: either a TopicArn or an EndpointArn
137
+ :param message: the default message you want to send
138
+ :param subject: subject of message
139
+ :param message_attributes: additional attributes to publish for message filtering. This should be
140
+ a flat dict; the DataType to be sent depends on the type of the value:
141
+
142
+ - bytes = Binary
143
+ - str = String
144
+ - int, float = Number
145
+ - iterable = String.Array
146
+ :param message_deduplication_id: Every message must have a unique message_deduplication_id.
147
+ This parameter applies only to FIFO (first-in-first-out) topics.
148
+ :param message_group_id: Tag that specifies that a message belongs to a specific message group.
149
+ This parameter applies only to FIFO (first-in-first-out) topics.
150
+ """
151
+ async with await self.get_async_conn() as async_client:
152
+ return await async_client.publish(
153
+ **_build_publish_kwargs(
154
+ target_arn,
155
+ message,
156
+ subject,
157
+ message_attributes,
158
+ message_deduplication_id,
159
+ message_group_id,
160
+ )
161
+ )
@@ -20,6 +20,7 @@
20
20
  from __future__ import annotations
21
21
 
22
22
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
23
+ from airflow.utils.helpers import prune_dict
23
24
 
24
25
 
25
26
  class SqsHook(AwsBaseHook):
@@ -52,6 +53,26 @@ class SqsHook(AwsBaseHook):
52
53
  """
53
54
  return self.get_conn().create_queue(QueueName=queue_name, Attributes=attributes or {})
54
55
 
56
+ @staticmethod
57
+ def _build_msg_params(
58
+ queue_url: str,
59
+ message_body: str,
60
+ delay_seconds: int = 0,
61
+ message_attributes: dict | None = None,
62
+ message_group_id: str | None = None,
63
+ message_deduplication_id: str | None = None,
64
+ ) -> dict:
65
+ return prune_dict(
66
+ {
67
+ "QueueUrl": queue_url,
68
+ "MessageBody": message_body,
69
+ "DelaySeconds": delay_seconds,
70
+ "MessageAttributes": message_attributes or {},
71
+ "MessageGroupId": message_group_id,
72
+ "MessageDeduplicationId": message_deduplication_id,
73
+ }
74
+ )
75
+
55
76
  def send_message(
56
77
  self,
57
78
  queue_url: str,
@@ -75,15 +96,47 @@ class SqsHook(AwsBaseHook):
75
96
  :param message_deduplication_id: This applies only to FIFO (first-in-first-out) queues.
76
97
  :return: dict with the information about the message sent
77
98
  """
78
- params = {
79
- "QueueUrl": queue_url,
80
- "MessageBody": message_body,
81
- "DelaySeconds": delay_seconds,
82
- "MessageAttributes": message_attributes or {},
83
- }
84
- if message_group_id:
85
- params["MessageGroupId"] = message_group_id
86
- if message_deduplication_id:
87
- params["MessageDeduplicationId"] = message_deduplication_id
88
-
99
+ params = self._build_msg_params(
100
+ queue_url=queue_url,
101
+ message_body=message_body,
102
+ delay_seconds=delay_seconds,
103
+ message_attributes=message_attributes,
104
+ message_group_id=message_group_id,
105
+ message_deduplication_id=message_deduplication_id,
106
+ )
89
107
  return self.get_conn().send_message(**params)
108
+
109
+ async def asend_message(
110
+ self,
111
+ queue_url: str,
112
+ message_body: str,
113
+ delay_seconds: int = 0,
114
+ message_attributes: dict | None = None,
115
+ message_group_id: str | None = None,
116
+ message_deduplication_id: str | None = None,
117
+ ) -> dict:
118
+ """
119
+ Send message to the queue (async).
120
+
121
+ .. seealso::
122
+ - :external+boto3:py:meth:`SQS.Client.send_message`
123
+
124
+ :param queue_url: queue url
125
+ :param message_body: the contents of the message
126
+ :param delay_seconds: seconds to delay the message
127
+ :param message_attributes: additional attributes for the message (default: None)
128
+ :param message_group_id: This applies only to FIFO (first-in-first-out) queues. (default: None)
129
+ :param message_deduplication_id: This applies only to FIFO (first-in-first-out) queues.
130
+ :return: dict with the information about the message sent
131
+ """
132
+ params = self._build_msg_params(
133
+ queue_url=queue_url,
134
+ message_body=message_body,
135
+ delay_seconds=delay_seconds,
136
+ message_attributes=message_attributes,
137
+ message_group_id=message_group_id,
138
+ message_deduplication_id=message_deduplication_id,
139
+ )
140
+
141
+ async with await self.get_async_conn() as async_conn:
142
+ return await async_conn.send_message(**params)
@@ -0,0 +1,139 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ from __future__ import annotations
18
+
19
+ from collections.abc import Iterable, Sequence
20
+ from functools import cached_property
21
+ from typing import Any
22
+
23
+ from airflow.providers.amazon.aws.hooks.ses import SesHook
24
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_PLUS
25
+ from airflow.providers.common.compat.notifier import BaseNotifier
26
+ from airflow.utils.helpers import prune_dict
27
+
28
+
29
+ class SesNotifier(BaseNotifier):
30
+ """
31
+ Amazon Simple Email Service (SES) Notifier.
32
+
33
+ :param mail_from: Email address to set as email's from
34
+ :param to: List of email addresses to set as email's to
35
+ :param subject: Email's subject
36
+ :param html_content: Content of email in HTML format
37
+ :param files: List of paths of files to be attached
38
+ :param cc: List of email addresses to set as email's CC
39
+ :param bcc: List of email addresses to set as email's BCC
40
+ :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
41
+ :param mime_charset: Email's charset. Default = UTF-8.
42
+ :param return_path: The email address to which replies will be sent. By default, replies
43
+ are sent to the original sender's email address.
44
+ :param reply_to: The email address to which message bounces and complaints should be sent.
45
+ "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
46
+ :param custom_headers: Additional headers to add to the MIME message.
47
+ No validations are run on these values, and they should be able to be encoded.
48
+ """
49
+
50
+ template_fields: Sequence[str] = (
51
+ "aws_conn_id",
52
+ "region_name",
53
+ "mail_from",
54
+ "to",
55
+ "subject",
56
+ "html_content",
57
+ "files",
58
+ "cc",
59
+ "bcc",
60
+ "mime_subtype",
61
+ "mime_charset",
62
+ "reply_to",
63
+ "return_path",
64
+ "custom_headers",
65
+ )
66
+
67
+ def __init__(
68
+ self,
69
+ *,
70
+ aws_conn_id: str | None = SesHook.default_conn_name,
71
+ region_name: str | None = None,
72
+ mail_from: str,
73
+ to: str | Iterable[str],
74
+ subject: str,
75
+ html_content: str,
76
+ files: list[str] | None = None,
77
+ cc: str | Iterable[str] | None = None,
78
+ bcc: str | Iterable[str] | None = None,
79
+ mime_subtype: str = "mixed",
80
+ mime_charset: str = "utf-8",
81
+ reply_to: str | None = None,
82
+ return_path: str | None = None,
83
+ custom_headers: dict[str, Any] | None = None,
84
+ **kwargs,
85
+ ):
86
+ if AIRFLOW_V_3_1_PLUS:
87
+ # Support for passing context was added in 3.1.0
88
+ super().__init__(**kwargs)
89
+ else:
90
+ super().__init__()
91
+ self.aws_conn_id = aws_conn_id
92
+ self.region_name = region_name
93
+
94
+ self.mail_from = mail_from
95
+ self.to = to
96
+ self.subject = subject
97
+ self.html_content = html_content
98
+ self.files = files
99
+ self.cc = cc
100
+ self.bcc = bcc
101
+ self.mime_subtype = mime_subtype
102
+ self.mime_charset = mime_charset
103
+ self.reply_to = reply_to
104
+ self.return_path = return_path
105
+ self.custom_headers = custom_headers
106
+
107
+ def _build_send_kwargs(self):
108
+ return prune_dict(
109
+ {
110
+ "mail_from": self.mail_from,
111
+ "to": self.to,
112
+ "subject": self.subject,
113
+ "html_content": self.html_content,
114
+ "files": self.files,
115
+ "cc": self.cc,
116
+ "bcc": self.bcc,
117
+ "mime_subtype": self.mime_subtype,
118
+ "mime_charset": self.mime_charset,
119
+ "reply_to": self.reply_to,
120
+ "return_path": self.return_path,
121
+ "custom_headers": self.custom_headers,
122
+ }
123
+ )
124
+
125
+ @cached_property
126
+ def hook(self) -> SesHook:
127
+ """Amazon Simple Email Service (SES) Hook (cached)."""
128
+ return SesHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
129
+
130
+ def notify(self, context):
131
+ """Send email using Amazon Simple Email Service (SES)."""
132
+ self.hook.send_email(**self._build_send_kwargs())
133
+
134
+ async def async_notify(self, context):
135
+ """Send email using Amazon Simple Email Service (SES) (async)."""
136
+ await self.hook.asend_email(**self._build_send_kwargs())
137
+
138
+
139
+ send_ses_notification = SesNotifier
@@ -21,6 +21,7 @@ from collections.abc import Sequence
21
21
  from functools import cached_property
22
22
 
23
23
  from airflow.providers.amazon.aws.hooks.sns import SnsHook
24
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_PLUS
24
25
  from airflow.providers.common.compat.notifier import BaseNotifier
25
26
 
26
27
 
@@ -60,8 +61,13 @@ class SnsNotifier(BaseNotifier):
60
61
  subject: str | None = None,
61
62
  message_attributes: dict | None = None,
62
63
  region_name: str | None = None,
64
+ **kwargs,
63
65
  ):
64
- super().__init__()
66
+ if AIRFLOW_V_3_1_PLUS:
67
+ # Support for passing context was added in 3.1.0
68
+ super().__init__(**kwargs)
69
+ else:
70
+ super().__init__()
65
71
  self.aws_conn_id = aws_conn_id
66
72
  self.region_name = region_name
67
73
  self.target_arn = target_arn
@@ -83,5 +89,14 @@ class SnsNotifier(BaseNotifier):
83
89
  message_attributes=self.message_attributes,
84
90
  )
85
91
 
92
+ async def async_notify(self, context):
93
+ """Publish the notification message to Amazon SNS (async)."""
94
+ await self.hook.apublish_to_target(
95
+ target_arn=self.target_arn,
96
+ message=self.message,
97
+ subject=self.subject,
98
+ message_attributes=self.message_attributes,
99
+ )
100
+
86
101
 
87
102
  send_sns_notification = SnsNotifier
@@ -21,6 +21,7 @@ from collections.abc import Sequence
21
21
  from functools import cached_property
22
22
 
23
23
  from airflow.providers.amazon.aws.hooks.sqs import SqsHook
24
+ from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_PLUS
24
25
  from airflow.providers.common.compat.notifier import BaseNotifier
25
26
 
26
27
 
@@ -64,8 +65,13 @@ class SqsNotifier(BaseNotifier):
64
65
  message_group_id: str | None = None,
65
66
  delay_seconds: int = 0,
66
67
  region_name: str | None = None,
68
+ **kwargs,
67
69
  ):
68
- super().__init__()
70
+ if AIRFLOW_V_3_1_PLUS:
71
+ # Support for passing context was added in 3.1.0
72
+ super().__init__(**kwargs)
73
+ else:
74
+ super().__init__()
69
75
  self.aws_conn_id = aws_conn_id
70
76
  self.region_name = region_name
71
77
  self.queue_url = queue_url
@@ -89,5 +95,15 @@ class SqsNotifier(BaseNotifier):
89
95
  message_group_id=self.message_group_id,
90
96
  )
91
97
 
98
+ async def async_notify(self, context):
99
+ """Publish the notification message to Amazon SQS queue (async)."""
100
+ await self.hook.asend_message(
101
+ queue_url=self.queue_url,
102
+ message_body=self.message_body,
103
+ delay_seconds=self.delay_seconds,
104
+ message_attributes=self.message_attributes,
105
+ message_group_id=self.message_group_id,
106
+ )
107
+
92
108
 
93
109
  send_sqs_notification = SqsNotifier
@@ -748,30 +748,32 @@ class EmrCreateJobFlowOperator(AwsBaseOperator[EmrHook]):
748
748
  job_flow_id=self._job_flow_id,
749
749
  log_uri=get_log_uri(emr_client=self.hook.conn, job_flow_id=self._job_flow_id),
750
750
  )
751
- if self.deferrable:
752
- self.defer(
753
- trigger=EmrCreateJobFlowTrigger(
754
- job_flow_id=self._job_flow_id,
755
- aws_conn_id=self.aws_conn_id,
756
- waiter_delay=self.waiter_delay,
757
- waiter_max_attempts=self.waiter_max_attempts,
758
- ),
759
- method_name="execute_complete",
760
- # timeout is set to ensure that if a trigger dies, the timeout does not restart
761
- # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
762
- timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay + 60),
763
- )
764
751
  if self.wait_policy:
765
752
  waiter_name = WAITER_POLICY_NAME_MAPPING[self.wait_policy]
766
- self.hook.get_waiter(waiter_name).wait(
767
- ClusterId=self._job_flow_id,
768
- WaiterConfig=prune_dict(
769
- {
770
- "Delay": self.waiter_delay,
771
- "MaxAttempts": self.waiter_max_attempts,
772
- }
773
- ),
774
- )
753
+
754
+ if self.deferrable:
755
+ self.defer(
756
+ trigger=EmrCreateJobFlowTrigger(
757
+ job_flow_id=self._job_flow_id,
758
+ aws_conn_id=self.aws_conn_id,
759
+ waiter_delay=self.waiter_delay,
760
+ waiter_max_attempts=self.waiter_max_attempts,
761
+ ),
762
+ method_name="execute_complete",
763
+ # timeout is set to ensure that if a trigger dies, the timeout does not restart
764
+ # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
765
+ timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay + 60),
766
+ )
767
+ else:
768
+ self.hook.get_waiter(waiter_name).wait(
769
+ ClusterId=self._job_flow_id,
770
+ WaiterConfig=prune_dict(
771
+ {
772
+ "Delay": self.waiter_delay,
773
+ "MaxAttempts": self.waiter_max_attempts,
774
+ }
775
+ ),
776
+ )
775
777
  return self._job_flow_id
776
778
 
777
779
  def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
@@ -122,8 +122,19 @@ class S3KeySensor(AwsBaseSensor[S3Hook]):
122
122
  """
123
123
  if self.wildcard_match:
124
124
  prefix = re.split(r"[\[*?]", key, 1)[0]
125
- keys = self.hook.get_file_metadata(prefix, bucket_name)
126
- key_matches = [k for k in keys if fnmatch.fnmatch(k["Key"], key)]
125
+
126
+ key_matches: list[str] = []
127
+
128
+ # If check_fn is None, then we can return True without having to iterate through each value
129
+ # yielded by iter_file_metadata. Otherwise, we'll check for a match, and add all matches to the
130
+ # key_matches list
131
+ for k in self.hook.iter_file_metadata(prefix, bucket_name):
132
+ if fnmatch.fnmatch(k["Key"], key):
133
+ if self.check_fn is None:
134
+ # This will only wait for a single match, and will immediately return
135
+ return True
136
+ key_matches.append(k)
137
+
127
138
  if not key_matches:
128
139
  return False
129
140
 
@@ -132,21 +143,23 @@ class S3KeySensor(AwsBaseSensor[S3Hook]):
132
143
  for f in key_matches:
133
144
  metadata = {}
134
145
  if "*" in self.metadata_keys:
135
- metadata = self.hook.head_object(f["Key"], bucket_name)
146
+ metadata = self.hook.head_object(f["Key"], bucket_name) # type: ignore[index]
136
147
  else:
137
- for key in self.metadata_keys:
148
+ for mk in self.metadata_keys:
138
149
  try:
139
- metadata[key] = f[key]
150
+ metadata[mk] = f[mk] # type: ignore[index]
140
151
  except KeyError:
141
152
  # supplied key might be from head_object response
142
- self.log.info("Key %s not found in response, performing head_object", key)
143
- metadata[key] = self.hook.head_object(f["Key"], bucket_name).get(key, None)
153
+ self.log.info("Key %s not found in response, performing head_object", mk)
154
+ metadata[mk] = self.hook.head_object(f["Key"], bucket_name).get(mk, None) # type: ignore[index]
144
155
  files.append(metadata)
156
+
145
157
  elif self.use_regex:
146
- keys = self.hook.get_file_metadata("", bucket_name)
147
- key_matches = [k for k in keys if re.match(pattern=key, string=k["Key"])]
148
- if not key_matches:
149
- return False
158
+ for k in self.hook.iter_file_metadata("", bucket_name):
159
+ if re.match(pattern=key, string=k["Key"]):
160
+ return True
161
+ return False
162
+
150
163
  else:
151
164
  obj = self.hook.head_object(key, bucket_name)
152
165
  if obj is None:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-amazon
3
- Version: 9.14.0rc1
3
+ Version: 9.15.0
4
4
  Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
5
5
  Keywords: airflow-provider,amazon,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
21
  Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
- Requires-Dist: apache-airflow>=2.10.0rc1
24
- Requires-Dist: apache-airflow-providers-common-compat>=1.6.1rc1
25
- Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
23
+ Requires-Dist: apache-airflow>=2.10.0
24
+ Requires-Dist: apache-airflow-providers-common-compat>=1.6.1
25
+ Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
26
26
  Requires-Dist: apache-airflow-providers-http
27
27
  Requires-Dist: boto3>=1.37.2
28
28
  Requires-Dist: botocore>=1.37.2
@@ -37,16 +37,16 @@ Requires-Dist: sagemaker-studio>=1.0.9
37
37
  Requires-Dist: marshmallow>=3
38
38
  Requires-Dist: aiobotocore[boto3]>=2.21.1 ; extra == "aiobotocore"
39
39
  Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
40
- Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc1 ; extra == "cncf-kubernetes"
41
- Requires-Dist: apache-airflow-providers-common-messaging>=2.0.0rc1 ; extra == "common-messaging"
40
+ Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
41
+ Requires-Dist: apache-airflow-providers-common-messaging>=2.0.0 ; extra == "common-messaging"
42
42
  Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
43
- Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
43
+ Requires-Dist: apache-airflow-providers-fab>=2.2.0 ; extra == "fab" and ( python_version < '3.13')
44
44
  Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
45
45
  Requires-Dist: apache-airflow-providers-google ; extra == "google"
46
46
  Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
47
47
  Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
48
48
  Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
49
- Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
49
+ Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
50
50
  Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml" and ( python_version < '3.13')
51
51
  Requires-Dist: xmlsec>=1.3.14 ; extra == "python3-saml" and ( python_version < '3.13')
52
52
  Requires-Dist: lxml>=6.0.0 ; extra == "python3-saml" and ( python_version < '3.13')
@@ -55,8 +55,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
55
55
  Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
56
56
  Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
57
57
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
58
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.14.0/changelog.html
59
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.14.0
58
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.15.0/changelog.html
59
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.15.0
60
60
  Project-URL: Mastodon, https://fosstodon.org/@airflow
61
61
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
62
62
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -104,7 +104,7 @@ Provides-Extra: standard
104
104
 
105
105
  Package ``apache-airflow-providers-amazon``
106
106
 
107
- Release: ``9.14.0``
107
+ Release: ``9.15.0``
108
108
 
109
109
 
110
110
  Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -117,7 +117,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
117
117
  are in ``airflow.providers.amazon`` python package.
118
118
 
119
119
  You can find package information and changelog for the provider
120
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/>`_.
120
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.15.0/>`_.
121
121
 
122
122
  Installation
123
123
  ------------
@@ -210,5 +210,5 @@ Extra Dependencies
210
210
  ==================== ========================================================================================================================================
211
211
 
212
212
  The changelog for the provider package can be found in the
213
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.14.0/changelog.html>`_.
213
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.15.0/changelog.html>`_.
214
214
 
@@ -1,5 +1,5 @@
1
1
  airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/amazon/__init__.py,sha256=x3G0awLAU_Df9ZuYvQcp6RIXS2gisytWaiMDdtQQ-O4,1496
2
+ airflow/providers/amazon/__init__.py,sha256=u7J_Abl9PCRoALJ9I1St2xTVo7_8Ox5fYlaIAYDf8Go,1496
3
3
  airflow/providers/amazon/get_provider_info.py,sha256=HqgOY-2XbaX7Nhb11ySGgUIrQJ_C8tBWRx9b6XO32zg,73282
4
4
  airflow/providers/amazon/version_compat.py,sha256=8biVK8TSccWSZKPfRoA5w9N9R6YznPWPq8RALrVDWuY,2309
5
5
  airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -7,7 +7,7 @@ airflow/providers/amazon/aws/exceptions.py,sha256=uRGNMgXvgdzfphpOTiyj74lQhjzb70
7
7
  airflow/providers/amazon/aws/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
8
8
  airflow/providers/amazon/aws/assets/s3.py,sha256=wNaJiOM90-SCauD4EQneZVXMO54yDRjLPfI8D5o0-fw,1861
9
9
  airflow/providers/amazon/aws/auth_manager/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
10
- airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=adgOpZj2ClyoO0fqUPAaNI5V7oIictoOGef77wzhEEk,15698
10
+ airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py,sha256=GQKJW2lv58X6gQl-f7KQ1Xhkdy0yrxQdAl66vAad3f8,19119
11
11
  airflow/providers/amazon/aws/auth_manager/constants.py,sha256=Jdluo42InhyNGkYHB_dRtoFMpKanJLJdH0hyR9-5AZg,1050
12
12
  airflow/providers/amazon/aws/auth_manager/user.py,sha256=zds3U6gHmwAy1MuxFFPtGTYikMj-RjYVki9-TSdfnbg,2043
13
13
  airflow/providers/amazon/aws/auth_manager/avp/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -26,7 +26,7 @@ airflow/providers/amazon/aws/bundles/s3.py,sha256=UA8vVyXCzyS5gy4eIBAOPIrB_TJqEY
26
26
  airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
27
27
  airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
28
28
  airflow/providers/amazon/aws/executors/aws_lambda/__init__.py,sha256=1PebDNZ6KXaXd3Zojp8lhULD6Elk-Pi_NiK3qi4G45s,950
29
- airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py,sha256=qBTz8-MJiNd06GQc5kUTAFaVrpVkJPl1WOUsslJX6P8,23996
29
+ airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py,sha256=2Y648YjAytgMEQkpja2B36YGVtp-yFSf90A7vkV2XKA,23997
30
30
  airflow/providers/amazon/aws/executors/aws_lambda/utils.py,sha256=6Shcr1_kMxQK9-IslzMbTK_O526PF9D2Z5CGyUrA4sA,2255
31
31
  airflow/providers/amazon/aws/executors/aws_lambda/docker/Dockerfile,sha256=_Oy_AHxEM-_BwtaL0iwWwD8Lm2RFSFGCBsiBUzzM7Dg,5043
32
32
  airflow/providers/amazon/aws/executors/aws_lambda/docker/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -85,13 +85,13 @@ airflow/providers/amazon/aws/hooks/rds.py,sha256=bAcaGeP7uNN0lp_FZtIPlt2JCZxcTEr
85
85
  airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=0whwYfl9U3VlDBvJ60v_FTCXMO7L9J4TgN7dIu2A5MM,7952
86
86
  airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=JxyXEyFeJHUtMxjjtMlCMJSW9P-cnixISd3R4Ob7fy8,11841
87
87
  airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=gpI1q9KK-mkewigehTegIhWJKrAQnQu1WedDfapx6gU,10947
88
- airflow/providers/amazon/aws/hooks/s3.py,sha256=sAuzqwpCkWzVl45Vu6juJsb3-T6mcsskaUlPUwGZxSE,67709
88
+ airflow/providers/amazon/aws/hooks/s3.py,sha256=NtMcEdVpTxWjPcN71TfDyfHKhq6ZnZ8t9-8n6znhcXo,68729
89
89
  airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=mZtAOZqBeiIJVJ5gycM16_fJwLxxGMEzsEoe2hwajP4,60524
90
90
  airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py,sha256=d3A50iQGOtqvQP9FywkZONr7JU0vMMP2MoqfjoZ_554,7989
91
91
  airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
92
- airflow/providers/amazon/aws/hooks/ses.py,sha256=DuCJwFhtg3D3mu0RSjRrebyUpwBAhrWSr-kdu8VR9qU,4174
93
- airflow/providers/amazon/aws/hooks/sns.py,sha256=SwhLeurqaV8TLhgCGsZgtf98D0_kLVLlhXer_96Anv4,4101
94
- airflow/providers/amazon/aws/hooks/sqs.py,sha256=306cpNAgRVuu1Dg8lrsg9o3aWsEJ9ELxxl7GhG-AbLA,3380
92
+ airflow/providers/amazon/aws/hooks/ses.py,sha256=YU_-vufW2xBiI2dpvyRrR_MskWVaq3rFTYF73bODt_s,6977
93
+ airflow/providers/amazon/aws/hooks/sns.py,sha256=9-XCs7vUb0IWjLLOu6SDnrsKaDwvCvNbCOIo0goP9sk,6046
94
+ airflow/providers/amazon/aws/hooks/sqs.py,sha256=OfDDpe-GObEnYh_I6C0GG7OLmvuo8hmGX_Cc29ChiZ4,5413
95
95
  airflow/providers/amazon/aws/hooks/ssm.py,sha256=QSouBw7JshWLY9YrB7qryvE05EBoTr7qVHwQBptZ8Qo,2734
96
96
  airflow/providers/amazon/aws/hooks/step_function.py,sha256=TSmPPF-CFR76a-K9f2yGtgdgd98UKZS71SP6crC_pIY,3964
97
97
  airflow/providers/amazon/aws/hooks/sts.py,sha256=6KYyou-tOhbGhRfnGHH95TUi3ENNHkISUJf0nskmuiw,1827
@@ -114,8 +114,9 @@ airflow/providers/amazon/aws/log/cloudwatch_task_handler.py,sha256=9WSFn4HPT0752
114
114
  airflow/providers/amazon/aws/log/s3_task_handler.py,sha256=9sHuzRldjpyT7kGwDnS5IjlOb5qV9n9i8yur2EotsWI,9942
115
115
  airflow/providers/amazon/aws/notifications/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
116
116
  airflow/providers/amazon/aws/notifications/chime.py,sha256=OpFM5fNknzD8mVz_04vOwKd-Ow2ArWM3QdrUAgaSHqs,2101
117
- airflow/providers/amazon/aws/notifications/sns.py,sha256=XracHC3r3BxzUuv-DzFLy6l7K6R_Ps85oJIUS0-Lkt4,3116
118
- airflow/providers/amazon/aws/notifications/sqs.py,sha256=iINaYMVw3hpu7EL2PB4BtTx7zsypFaY74C2QrNf7Z-c,3606
117
+ airflow/providers/amazon/aws/notifications/ses.py,sha256=Bvur_p2JmBDKbeRyhDxXW5QYlrbMKkaNe3TnH39GdEc,5109
118
+ airflow/providers/amazon/aws/notifications/sns.py,sha256=1MTrD9TuWPZUVsRPBngN5dgi_KOzVs66HgN1OGq7NsE,3687
119
+ airflow/providers/amazon/aws/notifications/sqs.py,sha256=KOGarVyfBfoton57Zc1QMoKbhgMkHbBHDO7SS-NmVD8,4250
119
120
  airflow/providers/amazon/aws/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
120
121
  airflow/providers/amazon/aws/operators/appflow.py,sha256=TlQSJc1frCw7yKfobjCLf2faWQIT0nKRans5Wy-kJQQ,20824
121
122
  airflow/providers/amazon/aws/operators/athena.py,sha256=CPKPZXN4dGELg0sW0LJIkG0X2-v5qPgWt4TMqYtTS18,14754
@@ -129,7 +130,7 @@ airflow/providers/amazon/aws/operators/dms.py,sha256=XmIcXpkp_--PBQF1m7NFfeHDTp4
129
130
  airflow/providers/amazon/aws/operators/ec2.py,sha256=SclBzOLo3GbQe3kw4S3MKf8zLm8IaKNSiGTc_U-OxRo,19700
130
131
  airflow/providers/amazon/aws/operators/ecs.py,sha256=5jvAibuqWS-x7-S9saRSb5umgkw8ec2YO_JGoMsja6o,33766
131
132
  airflow/providers/amazon/aws/operators/eks.py,sha256=djaoaMhj3N5JzKeQVZSMlJb9XGkiGYUAQx-xVocoBC0,52086
132
- airflow/providers/amazon/aws/operators/emr.py,sha256=Cw1qiA0eiPJODCSxHhPayo2_0TZOlA4mj8pcveV0WNc,75983
133
+ airflow/providers/amazon/aws/operators/emr.py,sha256=85QOXsZA-_zarlejwlmfQAnTc8mCHzEwdw-bFMnZlHI,76090
133
134
  airflow/providers/amazon/aws/operators/eventbridge.py,sha256=NacTdvRzZZFizSzC3rb0Z7g8dHQWkKQEXGYzFKOp3fc,10421
134
135
  airflow/providers/amazon/aws/operators/glacier.py,sha256=6TFC07B0EOmtRxLs7Bok4jwV84po2yVDa-DnlbnAOVg,3681
135
136
  airflow/providers/amazon/aws/operators/glue.py,sha256=2LA7KZp7mhitk9SrcqKBUVnS_NlqSrOwRnCDeuBCuGE,30534
@@ -179,7 +180,7 @@ airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=cSaZvCvAC7z
179
180
  airflow/providers/amazon/aws/sensors/quicksight.py,sha256=lm1omzh01BKh0KHU3g2I1yH9LAXtddUDiuIS3uIeOrE,3575
180
181
  airflow/providers/amazon/aws/sensors/rds.py,sha256=HWYQOQ7n9s48Ci2WxBOtrAp17aB-at5werAljq3NDYE,7420
181
182
  airflow/providers/amazon/aws/sensors/redshift_cluster.py,sha256=JZK03IPrPiXLnysps7kK0Pm19SJahTWLFZx_5oo4MbE,4609
182
- airflow/providers/amazon/aws/sensors/s3.py,sha256=8SiAyTh8_TMvPWrjpPw5gvoyB_omBhZALomG-zrpccs,17344
183
+ airflow/providers/amazon/aws/sensors/s3.py,sha256=082DjhTfpwg04K_Gu7Mq5V5JuotBRYSEKSi5baAFJhw,17866
183
184
  airflow/providers/amazon/aws/sensors/sagemaker.py,sha256=dVQntJNRyUYCLQ7cIkeHesgZxf-1yS_BBAiVBzCwaHI,13795
184
185
  airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py,sha256=ohVNJ_mYTkvoF3d8yAl556uiuFEixLurN_FXcrkvuoQ,2884
185
186
  airflow/providers/amazon/aws/sensors/sqs.py,sha256=V3d05xb2VuxdWimpDVJy_SOKX7N0ok9TBbEYO-9o3v4,10672
@@ -280,7 +281,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
280
281
  airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
281
282
  airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
282
283
  airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
283
- apache_airflow_providers_amazon-9.14.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
284
- apache_airflow_providers_amazon-9.14.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
285
- apache_airflow_providers_amazon-9.14.0rc1.dist-info/METADATA,sha256=ec-KhqQdRn3UPepDTCWe3pA91VRS9kYLDDIM6g_YFqM,11876
286
- apache_airflow_providers_amazon-9.14.0rc1.dist-info/RECORD,,
284
+ apache_airflow_providers_amazon-9.15.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
285
+ apache_airflow_providers_amazon-9.15.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
286
+ apache_airflow_providers_amazon-9.15.0.dist-info/METADATA,sha256=oNDWMK0jvX1f7_HMfzR61WkRTRaSgHg7w_iIgFW3XEs,11838
287
+ apache_airflow_providers_amazon-9.15.0.dist-info/RECORD,,