apache-airflow-providers-amazon 9.11.0rc1__py3-none-any.whl → 9.12.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
--- a/airflow/providers/amazon/__init__.py
+++ b/airflow/providers/amazon/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.11.0"
+__version__ = "9.12.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
--- a/airflow/providers/amazon/aws/bundles/s3.py
+++ b/airflow/providers/amazon/aws/bundles/s3.py
@@ -137,10 +137,23 @@ class S3DagBundle(BaseDagBundle):
         )
 
     def view_url(self, version: str | None = None) -> str | None:
+        """
+        Return a URL for viewing the DAGs in S3. Currently, versioning is not supported.
+
+        This method is deprecated and will be removed when the minimum supported Airflow version is 3.1.
+        Use `view_url_template` instead.
+        """
+        return self.view_url_template()
+
+    def view_url_template(self) -> str | None:
         """Return a URL for viewing the DAGs in S3. Currently, versioning is not supported."""
         if self.version:
             raise AirflowException("S3 url with version is not supported")
-
+        if hasattr(self, "_view_url_template") and self._view_url_template:
+            # Because we use this method in the view_url method, we need to handle
+            # backward compatibility for Airflow versions that don't have the
+            # _view_url_template attribute. Should be removed when we drop support for Airflow 3.0.
+            return self._view_url_template
         # https://<bucket-name>.s3.<region>.amazonaws.com/<object-key>
         url = f"https://{self.bucket_name}.s3"
         if self.s3_hook.region_name:
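Note on the hunk above: view_url is now a thin shim over the new view_url_template, which is the method to call (or override) going forward. A minimal caller-side sketch, assuming bundle is an S3DagBundle instance (the variable name is illustrative):

    # Prefer the new template method when present; fall back to the
    # deprecated view_url() on older provider versions.
    url = (
        bundle.view_url_template()
        if hasattr(bundle, "view_url_template")
        else bundle.view_url()
    )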
--- a/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py
+++ b/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py
@@ -38,6 +38,7 @@ S3_URI = os.environ.get("S3_URI", None)
 # Input and output keys
 TASK_KEY_KEY = "task_key"
 COMMAND_KEY = "command"
+EXECUTOR_CONFIG_KEY = "executor_config"
 RETURN_CODE_KEY = "return_code"
 
 
@@ -47,8 +48,9 @@ def lambda_handler(event, context):
 
     command = event.get(COMMAND_KEY)
     task_key = event.get(TASK_KEY_KEY)
+    executor_config = event.get(EXECUTOR_CONFIG_KEY, {})  # noqa: F841
 
-    # Any pre-processing or validation of the command or use of the context can be done here or above.
+    # Any pre-processing or validation of the command or use of the executor_config can be done here or above.
 
     # Sync dags from s3 to the local dags directory
     if S3_URI:
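With the two hunks above, the Lambda entry point now receives the task's executor_config alongside the command. A minimal sketch of the event shape, with illustrative values (the key names come from the constants above; the payload contents shown here are assumptions):

    event = {
        "task_key": "example-serialized-task-key",      # hypothetical key
        "command": ["airflow", "tasks", "run", "..."],  # hypothetical command
        "executor_config": {"memory_size": 512},        # hypothetical user config
    }
    # lambda_handler(event, context) then unpacks these via the *_KEY constants.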
--- a/airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py
+++ b/airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py
@@ -43,7 +43,11 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
 from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.stats import Stats
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 
 from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
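The same try/except import fallback recurs in the batch, ECS, backoff-retry, and SageMaker hunks below: on Airflow 3.x the Task SDK exposes timezone via airflow.sdk, while Airflow 2.x only has airflow.utils.timezone. A standalone sketch of the pattern:

    # Compat import: resolve timezone from the Task SDK when available
    # (Airflow 3.x), otherwise from the legacy location (Airflow 2.x).
    try:
        from airflow.sdk import timezone
    except ImportError:
        from airflow.utils import timezone

    now = timezone.utcnow()  # timezone-aware UTC datetime on both versions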
@@ -270,6 +274,7 @@ class AwsLambdaExecutor(BaseExecutor):
             payload = {
                 "task_key": ser_task_key,
                 "command": cmd,
+                "executor_config": task_to_run.executor_config,
             }
             if timezone.utcnow() < task_to_run.next_attempt_time:
                 self.pending_tasks.append(task_to_run)
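The payload change above is the executor-side half of the app.py change earlier: whatever executor_config a task declares is forwarded verbatim in the Lambda invocation. A usage sketch, assuming the standard provider's PythonOperator (the config keys are user-defined; "memory_size" is illustrative):

    from airflow.providers.standard.operators.python import PythonOperator

    task = PythonOperator(
        task_id="example",
        python_callable=lambda: None,
        # Arrives in the Lambda event under the "executor_config" key.
        executor_config={"memory_size": 512},
    )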
@@ -421,17 +426,26 @@ class AwsLambdaExecutor(BaseExecutor):
                     "Successful Lambda invocation for task %s received from SQS queue.", task_key
                 )
             else:
-                # In this case the Lambda likely started but failed at run time since we got a non-zero
-                # return code. We could consider retrying these tasks within the executor, because this _likely_
-                # means the Airflow task did not run to completion, however we can't be sure (maybe the
-                # lambda runtime code has a bug and is returning a non-zero when it actually passed?). So
-                # perhaps not retrying is the safest option.
                 self.fail(task_key)
-                self.log.error(
-                    "Lambda invocation for task: %s has failed to run with return code %s",
-                    task_key,
-                    return_code,
-                )
+                if queue_url == self.dlq_url and return_code is None:
+                    # DLQ failure: the AWS Lambda service could not complete the invocation after retries.
+                    # This indicates a Lambda-level failure (timeout, memory limit, crash, etc.)
+                    # where the function was unable to execute successfully and return a result.
+                    self.log.error(
+                        "DLQ message received: Lambda invocation for task: %s was unable to successfully execute. This likely indicates a Lambda-level failure (timeout, memory limit, crash, etc.).",
+                        task_key,
+                    )
+                else:
+                    # In this case the Lambda likely started but failed at run time, since we got a non-zero
+                    # return code. We could consider retrying these tasks within the executor, because this
+                    # _likely_ means the Airflow task did not run to completion; however, we can't be sure
+                    # (maybe the lambda runtime code has a bug and returns a non-zero when it actually
+                    # passed?), so not retrying is perhaps the safest option.
+                    self.log.debug(
+                        "Lambda invocation for task: %s completed but the underlying Airflow task has returned a non-zero exit code %s",
+                        task_key,
+                        return_code,
+                    )
             # Remove the task from the tracking mapping.
             self.running_tasks.pop(ser_task_key)
 
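The rework above splits failure handling into two paths: a message arriving on the dead-letter queue with no return code means the Lambda service itself never completed the invocation, while a result-queue message with a non-zero return code means the function ran but the Airflow task failed. A sketch of the two message shapes being distinguished (field names follow the app.py constants; the exact contents are an assumption):

    dlq_message = {"task_key": "...", "command": ["..."]}   # no "return_code": Lambda-level failure
    result_message = {"task_key": "...", "return_code": 1}  # ran, but the task exited non-zero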
--- a/airflow/providers/amazon/aws/executors/batch/batch_executor.py
+++ b/airflow/providers/amazon/aws/executors/batch/batch_executor.py
@@ -38,7 +38,11 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.stats import Stats
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
--- a/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
+++ b/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -51,7 +51,11 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry impo
 from airflow.providers.amazon.aws.hooks.ecs import EcsHook
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.stats import Stats
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 from airflow.utils.helpers import merge_dicts
 from airflow.utils.state import State
 
--- a/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py
+++ b/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py
@@ -20,7 +20,10 @@ import logging
 from collections.abc import Callable
 from datetime import datetime, timedelta
 
-from airflow.utils import timezone
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 
 log = logging.getLogger(__name__)
 
--- a/airflow/providers/amazon/aws/hooks/sagemaker.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -36,7 +36,11 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.tags import format_tags
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 
 
 class LogState:
--- a/airflow/providers/amazon/aws/sensors/mwaa.py
+++ b/airflow/providers/amazon/aws/sensors/mwaa.py
@@ -24,10 +24,9 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
-from airflow.providers.amazon.aws.triggers.mwaa import MwaaDagRunCompletedTrigger
-from airflow.providers.amazon.aws.utils import validate_execute_complete_event
+from airflow.providers.amazon.aws.triggers.mwaa import MwaaDagRunCompletedTrigger, MwaaTaskCompletedTrigger
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.utils.state import DagRunState
+from airflow.utils.state import DagRunState, TaskInstanceState
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -139,7 +138,7 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
         return state in self.success_states
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        validate_execute_complete_event(event)
+        return None
 
     def execute(self, context: Context):
         if self.deferrable:
@@ -150,10 +149,152 @@ class MwaaDagRunSensor(AwsBaseSensor[MwaaHook]):
                     external_dag_run_id=self.external_dag_run_id,
                     success_states=self.success_states,
                     failure_states=self.failure_states,
-                    # somehow the type of poke_interval is derived as float ??
-                    waiter_delay=self.poke_interval,  # type: ignore[arg-type]
+                    waiter_delay=int(self.poke_interval),
                     waiter_max_attempts=self.max_retries,
                     aws_conn_id=self.aws_conn_id,
+                    end_from_trigger=True,
+                ),
+                method_name="execute_complete",
+            )
+        else:
+            super().execute(context=context)
+
+
+class MwaaTaskSensor(AwsBaseSensor[MwaaHook]):
+    """
+    Waits for a task in an MWAA Environment to complete.
+
+    If the task fails, an AirflowException is thrown.
+
+    .. seealso::
+        For more information on how to use this sensor, take a look at the guide:
+        :ref:`howto/sensor:MwaaTaskSensor`
+
+    :param external_env_name: The external MWAA environment name that contains the Task Instance you want to wait for
+        (templated)
+    :param external_dag_id: The DAG ID in the external MWAA environment that contains the Task Instance you want to wait for
+        (templated)
+    :param external_dag_run_id: The DAG Run ID in the external MWAA environment that you want to wait for (templated)
+    :param external_task_id: The Task ID in the external MWAA environment that you want to wait for (templated)
+    :param success_states: Collection of task instance states that would make this task marked as successful, default is
+        ``{airflow.utils.state.TaskInstanceState.SUCCESS}`` (templated)
+    :param failure_states: Collection of task instance states that would make this task marked as failed and raise an
+        AirflowException, default is ``{airflow.utils.state.TaskInstanceState.FAILED}`` (templated)
+    :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
+        module to be installed.
+        (default: False, but can be overridden in config file by setting default_deferrable to True)
+    :param poke_interval: Polling period in seconds to check for the status of the job. (default: 60)
+    :param max_retries: Number of times before returning the current state. (default: 720)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    aws_hook_class = MwaaHook
+    template_fields: Sequence[str] = aws_template_fields(
+        "external_env_name",
+        "external_dag_id",
+        "external_dag_run_id",
+        "external_task_id",
+        "success_states",
+        "failure_states",
+        "deferrable",
+        "max_retries",
+        "poke_interval",
+    )
+
+    def __init__(
+        self,
+        *,
+        external_env_name: str,
+        external_dag_id: str,
+        external_dag_run_id: str | None = None,
+        external_task_id: str,
+        success_states: Collection[str] | None = None,
+        failure_states: Collection[str] | None = None,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        poke_interval: int = 60,
+        max_retries: int = 720,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.success_states = set(success_states) if success_states else {TaskInstanceState.SUCCESS.value}
+        self.failure_states = set(failure_states) if failure_states else {TaskInstanceState.FAILED.value}
+
+        if len(self.success_states & self.failure_states):
+            raise ValueError("success_states and failure_states must not have any values in common")
+
+        self.external_env_name = external_env_name
+        self.external_dag_id = external_dag_id
+        self.external_dag_run_id = external_dag_run_id
+        self.external_task_id = external_task_id
+        self.deferrable = deferrable
+        self.poke_interval = poke_interval
+        self.max_retries = max_retries
+
+    def poke(self, context: Context) -> bool:
+        self.log.info(
+            "Poking for task %s of DAG run %s of DAG %s in MWAA environment %s",
+            self.external_task_id,
+            self.external_dag_run_id,
+            self.external_dag_id,
+            self.external_env_name,
+        )
+
+        response = self.hook.invoke_rest_api(
+            env_name=self.external_env_name,
+            path=f"/dags/{self.external_dag_id}/dagRuns/{self.external_dag_run_id}/taskInstances/{self.external_task_id}",
+            method="GET",
+        )
+        # If RestApiStatusCode == 200, the RestApiResponse must have the "state" key, otherwise something terrible has
+        # happened in the API and KeyError would be raised
+        # If RestApiStatusCode >= 300, a botocore exception would've already been raised during the
+        # self.hook.invoke_rest_api call
+        # The scope of this sensor is going to only be raising AirflowException due to failure of the task
+
+        state = response["RestApiResponse"]["state"]
+
+        if state in self.failure_states:
+            raise AirflowException(
+                f"The task {self.external_task_id} of DAG run {self.external_dag_run_id} of DAG {self.external_dag_id} in MWAA environment {self.external_env_name} "
+                f"failed with state: {state}"
+            )
+
+        return state in self.success_states
+
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        return None
+
+    def execute(self, context: Context):
+        if self.external_dag_run_id is None:
+            response = self.hook.invoke_rest_api(
+                env_name=self.external_env_name,
+                path=f"/dags/{self.external_dag_id}/dagRuns",
+                method="GET",
+            )
+            self.external_dag_run_id = response["RestApiResponse"]["dag_runs"][-1]["dag_run_id"]
+
+        if self.deferrable:
+            self.defer(
+                trigger=MwaaTaskCompletedTrigger(
+                    external_env_name=self.external_env_name,
+                    external_dag_id=self.external_dag_id,
+                    external_dag_run_id=self.external_dag_run_id,
+                    external_task_id=self.external_task_id,
+                    success_states=self.success_states,
+                    failure_states=self.failure_states,
+                    waiter_delay=int(self.poke_interval),
+                    waiter_max_attempts=self.max_retries,
+                    aws_conn_id=self.aws_conn_id,
+                    end_from_trigger=True,
                 ),
                 method_name="execute_complete",
             )
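A minimal usage sketch for the new sensor; the environment, DAG, and task names are placeholders:

    from airflow.providers.amazon.aws.sensors.mwaa import MwaaTaskSensor

    wait_for_remote_task = MwaaTaskSensor(
        task_id="wait_for_remote_task",
        external_env_name="my-mwaa-env",  # placeholder environment name
        external_dag_id="remote_dag",     # placeholder DAG id
        external_task_id="remote_task",   # placeholder task id
        # external_dag_run_id omitted: execute() falls back to the latest DAG run
        deferrable=True,
        poke_interval=60,
        max_retries=720,
    )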
--- a/airflow/providers/amazon/aws/triggers/base.py
+++ b/airflow/providers/amazon/aws/triggers/base.py
@@ -80,7 +80,7 @@ class AwsBaseWaiterTrigger(BaseTrigger):
         waiter_delay: int,
         waiter_max_attempts: int,
         waiter_config_overrides: dict[str, Any] | None = None,
-        aws_conn_id: str | None,
+        aws_conn_id: str | None = "aws_default",
         region_name: str | None = None,
         verify: bool | str | None = None,
         botocore_config: dict | None = None,
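Giving aws_conn_id a default of "aws_default" makes it optional for every AwsBaseWaiterTrigger subclass, which is what lets the MWAA triggers below drop their own aws_conn_id parameter and forward it through **kwargs instead. A sketch of the effect, with placeholder argument values:

    # aws_conn_id no longer has to be passed explicitly; omitted, it
    # resolves to "aws_default" in the base trigger.
    trigger = MwaaDagRunCompletedTrigger(
        external_env_name="my-mwaa-env",                    # placeholder
        external_dag_id="remote_dag",                       # placeholder
        external_dag_run_id="manual__2024-01-01T00:00:00",  # placeholder
    )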
--- a/airflow/providers/amazon/aws/triggers/mwaa.py
+++ b/airflow/providers/amazon/aws/triggers/mwaa.py
@@ -22,7 +22,7 @@ from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook
 from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
-from airflow.utils.state import DagRunState
+from airflow.utils.state import DagRunState, State, TaskInstanceState
 
 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
@@ -48,7 +48,7 @@ class MwaaDagRunCompletedTrigger(AwsBaseWaiterTrigger):
 
     def __init__(
         self,
-        *,
+        *args,
         external_env_name: str,
         external_dag_id: str,
         external_dag_run_id: str,
@@ -56,7 +56,7 @@ class MwaaDagRunCompletedTrigger(AwsBaseWaiterTrigger):
         failure_states: Collection[str] | None = None,
         waiter_delay: int = 60,
         waiter_max_attempts: int = 720,
-        aws_conn_id: str | None = None,
+        **kwargs,
     ) -> None:
         self.success_states = set(success_states) if success_states else {DagRunState.SUCCESS.value}
         self.failure_states = set(failure_states) if failure_states else {DagRunState.FAILED.value}
@@ -87,7 +87,6 @@ class MwaaDagRunCompletedTrigger(AwsBaseWaiterTrigger):
             return_value=external_dag_run_id,
             waiter_delay=waiter_delay,
             waiter_max_attempts=waiter_max_attempts,
-            aws_conn_id=aws_conn_id,
             waiter_config_overrides={
                 "acceptors": _build_waiter_acceptors(
                     success_states=self.success_states,
@@ -95,6 +94,93 @@ class MwaaDagRunCompletedTrigger(AwsBaseWaiterTrigger):
                     in_progress_states=in_progress_states,
                 )
             },
+            **kwargs,
+        )
+
+    def hook(self) -> AwsGenericHook:
+        return MwaaHook(
+            aws_conn_id=self.aws_conn_id,
+            region_name=self.region_name,
+            verify=self.verify,
+            config=self.botocore_config,
+        )
+
+
+class MwaaTaskCompletedTrigger(AwsBaseWaiterTrigger):
+    """
+    Trigger when an MWAA Task is complete.
+
+    :param external_env_name: The external MWAA environment name that contains the Task Instance you want to wait for
+        (templated)
+    :param external_dag_id: The DAG ID in the external MWAA environment that contains the Task Instance you want to wait for
+        (templated)
+    :param external_dag_run_id: The DAG Run ID in the external MWAA environment that you want to wait for (templated).
+        If not provided, the latest DAG run is used by default.
+    :param external_task_id: The Task ID in the external MWAA environment that you want to wait for (templated)
+    :param success_states: Collection of task instance states that would make this task marked as successful, default is
+        ``{airflow.utils.state.TaskInstanceState.SUCCESS}`` (templated)
+    :param failure_states: Collection of task instance states that would make this task marked as failed and raise an
+        AirflowException, default is ``{airflow.utils.state.TaskInstanceState.FAILED}`` (templated)
+    :param waiter_delay: The amount of time in seconds to wait between attempts. (default: 60)
+    :param waiter_max_attempts: The maximum number of attempts to be made. (default: 720)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+    """
+
+    def __init__(
+        self,
+        *args,
+        external_env_name: str,
+        external_dag_id: str,
+        external_dag_run_id: str | None = None,
+        external_task_id: str,
+        success_states: Collection[str] | None = None,
+        failure_states: Collection[str] | None = None,
+        waiter_delay: int = 60,
+        waiter_max_attempts: int = 720,
+        **kwargs,
+    ) -> None:
+        self.success_states = (
+            set(success_states) if success_states else {state.value for state in State.success_states}
+        )
+        self.failure_states = (
+            set(failure_states) if failure_states else {state.value for state in State.failed_states}
+        )
+
+        if len(self.success_states & self.failure_states):
+            raise ValueError("success_states and failure_states must not have any values in common")
+
+        in_progress_states = {s.value for s in TaskInstanceState} - self.success_states - self.failure_states
+
+        super().__init__(
+            serialized_fields={
+                "external_env_name": external_env_name,
+                "external_dag_id": external_dag_id,
+                "external_dag_run_id": external_dag_run_id,
+                "external_task_id": external_task_id,
+                "success_states": success_states,
+                "failure_states": failure_states,
+            },
+            waiter_name="mwaa_task_complete",
+            waiter_args={
+                "Name": external_env_name,
+                "Path": f"/dags/{external_dag_id}/dagRuns/{external_dag_run_id}/taskInstances/{external_task_id}",
+                "Method": "GET",
+            },
+            failure_message=f"The task {external_task_id} of DAG run {external_dag_run_id} of DAG {external_dag_id} in MWAA environment {external_env_name} failed with state",
+            status_message="State of DAG run",
+            status_queries=["RestApiResponse.state"],
+            return_key="task_id",
+            return_value=external_task_id,
+            waiter_delay=waiter_delay,
+            waiter_max_attempts=waiter_max_attempts,
+            waiter_config_overrides={
+                "acceptors": _build_waiter_acceptors(
+                    success_states=self.success_states,
+                    failure_states=self.failure_states,
+                    in_progress_states=in_progress_states,
+                )
+            },
+            **kwargs,
         )
 
     def hook(self) -> AwsGenericHook:
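Both MWAA triggers now override hook() to build an MwaaHook from the connection attributes that AwsBaseWaiterTrigger stores from **kwargs (aws_conn_id, region_name, verify, botocore_config). A deferral sketch for the new trigger, with placeholder values:

    trigger = MwaaTaskCompletedTrigger(
        external_env_name="my-mwaa-env",  # placeholder
        external_dag_id="remote_dag",     # placeholder
        external_task_id="remote_task",   # placeholder
        aws_conn_id="aws_default",        # optional; forwarded via **kwargs
    )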
--- a/airflow/providers/amazon/aws/waiters/mwaa.json
+++ b/airflow/providers/amazon/aws/waiters/mwaa.json
@@ -31,6 +31,11 @@
                     "state": "failure"
                 }
             ]
+        },
+        "mwaa_task_complete": {
+            "delay": 60,
+            "maxAttempts": 20,
+            "operation": "InvokeRestApi"
         }
     }
 }
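The new waiter entry deliberately ships without acceptors: as in the trigger hunks above, the acceptor list is injected at runtime through waiter_config_overrides, built from the caller's success/failure state sets. A sketch of the acceptor shape this likely produces (an assumption based on the "RestApiResponse.state" queries and the "state": "failure" acceptor visible above; _build_waiter_acceptors itself is not shown in this diff):

    acceptor = {
        "matcher": "path",                    # standard botocore waiter matcher
        "argument": "RestApiResponse.state",  # JMESPath into the API response
        "expected": "success",                # one of the configured states
        "state": "success",                   # waiter outcome on a match
    }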
--- apache_airflow_providers_amazon-9.11.0rc1.dist-info/METADATA
+++ apache_airflow_providers_amazon-9.12.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-amazon
-Version: 9.11.0rc1
+Version: 9.12.0
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0rc1
-Requires-Dist: apache-airflow-providers-common-compat>=1.6.1rc1
-Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
+Requires-Dist: apache-airflow>=2.10.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.6.1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: boto3>=1.37.2
 Requires-Dist: botocore>=1.37.2
@@ -37,16 +37,16 @@ Requires-Dist: sagemaker-studio>=1.0.9
 Requires-Dist: marshmallow>=3
 Requires-Dist: aiobotocore[boto3]>=2.21.1 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache-hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc1 ; extra == "cncf-kubernetes"
-Requires-Dist: apache-airflow-providers-common-messaging>=1.0.1rc1 ; extra == "common-messaging"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf-kubernetes"
+Requires-Dist: apache-airflow-providers-common-messaging>=1.0.3 ; extra == "common-messaging"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
-Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
+Requires-Dist: apache-airflow-providers-fab>=2.2.0 ; extra == "fab" and ( python_version < '3.13')
 Requires-Dist: apache-airflow-providers-ftp ; extra == "ftp"
 Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-imap ; extra == "imap"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-mongo ; extra == "mongo"
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
 Requires-Dist: python3-saml>=1.16.0 ; extra == "python3-saml" and ( python_version < '3.13')
 Requires-Dist: xmlsec>=1.3.14 ; extra == "python3-saml" and ( python_version < '3.13')
 Requires-Dist: lxml>=6.0.0 ; extra == "python3-saml" and ( python_version < '3.13')
@@ -55,8 +55,8 @@ Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.11.0/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-amazon/9.11.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.12.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.12.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -104,7 +104,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``9.11.0``
+Release: ``9.12.0``
 
 Release Date: ``|PypiReleaseDate|``
 
@@ -118,7 +118,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
 are in ``airflow.providers.amazon`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.11.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.12.0/>`_.
 
 Installation
 ------------
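For reference, the new release described above installs with:

    pip install apache-airflow-providers-amazon==9.12.0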
@@ -186,5 +186,5 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.11.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.12.0/changelog.html>`_.
 
--- apache_airflow_providers_amazon-9.11.0rc1.dist-info/RECORD
+++ apache_airflow_providers_amazon-9.12.0.dist-info/RECORD
@@ -1,5 +1,5 @@
 airflow/providers/amazon/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/amazon/__init__.py,sha256=jBnxUQuGC6nq67_O7qB3iK1Ir2z6m83935PsA7hd03U,1496
+airflow/providers/amazon/__init__.py,sha256=dZcJ_86JV23wzB-bs-s5B6vbm3j55gDjNXaOL-VI6fs,1496
 airflow/providers/amazon/get_provider_info.py,sha256=HqgOY-2XbaX7Nhb11ySGgUIrQJ_C8tBWRx9b6XO32zg,73282
 airflow/providers/amazon/version_compat.py,sha256=8biVK8TSccWSZKPfRoA5w9N9R6YznPWPq8RALrVDWuY,2309
 airflow/providers/amazon/aws/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -22,28 +22,28 @@ airflow/providers/amazon/aws/auth_manager/datamodels/login.py,sha256=s2mwJ3Hg3PA
 airflow/providers/amazon/aws/auth_manager/routes/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/auth_manager/routes/login.py,sha256=gfEhqrrTIIzktTZe_kwOryRloURmZdzSUJDoj23_cLM,6061
 airflow/providers/amazon/aws/bundles/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/bundles/s3.py,sha256=I9vjYnn5kQyOlwMcANTxHeH79zGzocz_9UWpUCBgCaI,5623
+airflow/providers/amazon/aws/bundles/s3.py,sha256=UA8vVyXCzyS5gy4eIBAOPIrB_TJqEYDmKVJLm4sosgQ,6351
 airflow/providers/amazon/aws/executors/Dockerfile,sha256=VZ-YOR59KSMoztJV_g7v5hUwetKR0Ii4wNNaKqDIfyQ,4275
 airflow/providers/amazon/aws/executors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/executors/aws_lambda/__init__.py,sha256=1PebDNZ6KXaXd3Zojp8lhULD6Elk-Pi_NiK3qi4G45s,950
-airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py,sha256=CUvpOtpezfNRjUj4ZNLf1yYKSd0DZ9DUB-v-bbyiy4Q,23030
+airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py,sha256=qBTz8-MJiNd06GQc5kUTAFaVrpVkJPl1WOUsslJX6P8,23996
 airflow/providers/amazon/aws/executors/aws_lambda/utils.py,sha256=6Shcr1_kMxQK9-IslzMbTK_O526PF9D2Z5CGyUrA4sA,2255
 airflow/providers/amazon/aws/executors/aws_lambda/docker/Dockerfile,sha256=_Oy_AHxEM-_BwtaL0iwWwD8Lm2RFSFGCBsiBUzzM7Dg,5043
 airflow/providers/amazon/aws/executors/aws_lambda/docker/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py,sha256=-qw38fir2vc73VcuGgZ-dwoMeCCeQMbJ-QNIis4CDCc,4758
+airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py,sha256=_H1lnjL2tEN5ENuEoTyNORsuAxQFAmwmv_6mF0OEwb4,4877
 airflow/providers/amazon/aws/executors/batch/__init__.py,sha256=TPSNZJ6E3zqN7mvdrMrarqwHeFYN9Efd2jD3hpN7tr0,970
-airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=O9MOSlUtvFS5feCCnu3CTXzRxGVAxt3JthznJdbVyMo,22915
+airflow/providers/amazon/aws/executors/batch/batch_executor.py,sha256=IuP6STjX9vKwh7dpdClz15SM2jlOpGbP19vVqLsxYGk,23021
 airflow/providers/amazon/aws/executors/batch/batch_executor_config.py,sha256=7yYLKB1jRoBy0AeW5chcpz7i2UfvSQob9QLvMhYUWDQ,3223
 airflow/providers/amazon/aws/executors/batch/boto_schema.py,sha256=Rqr_uk6Tx6hNVYsQRPNlLj0zC8TC_awWk2rv3tkUuYU,2445
 airflow/providers/amazon/aws/executors/batch/utils.py,sha256=QXaKyrUMCYr_Oz7Hq5b7A-gppP61fQtaOX7wip1J7ho,5274
 airflow/providers/amazon/aws/executors/ecs/__init__.py,sha256=J_B7TIPPQmn67Y7kzr4pgzcpFRr0wUp6gVsyfz5GKc4,962
 airflow/providers/amazon/aws/executors/ecs/boto_schema.py,sha256=c_2BJu6pC9xjRuPfufqSMYPZVDAbma0JO71JKSBRMSg,3760
-airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=NDambx-tjmlhQnQhhKr5FM0leMr3nnIKXmXxXJC60hQ,26843
+airflow/providers/amazon/aws/executors/ecs/ecs_executor.py,sha256=obfHr_-h1F_2dt5AEmtJYfUcqGHqGBALwH6e7_lpZnY,26949
 airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py,sha256=HkQDNKKNQKYjbCWP8MKoMjF2DRp1NLleajZpJcQ6gVw,5875
 airflow/providers/amazon/aws/executors/ecs/utils.py,sha256=hp_C_XzfNqKXXBOSN8e8TBG4vAqEDkaUi0YqLGJzNbE,9759
 airflow/providers/amazon/aws/executors/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/executors/utils/base_config_keys.py,sha256=q-xDVM8_iiygte8PK1khJjX7442sTNe72xJGwngtdV8,1169
-airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py,sha256=gcxqB4nbzC2n9F8-tpHzvdjpMmItQznTXycHi8EWYS4,3109
+airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py,sha256=Z-k6rMiqevskVF1smvbt3_CdSwFy1gVbiPzOHGM-u0o,3214
 airflow/providers/amazon/aws/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/amazon/aws/fs/s3.py,sha256=i-vbkhLjH4RQ1pNIdNyERbVw_PhCsTiKktg4jrJ0TT8,4818
 airflow/providers/amazon/aws/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -86,7 +86,7 @@ airflow/providers/amazon/aws/hooks/redshift_cluster.py,sha256=FTpUkyLCM_aRCIApVk
 airflow/providers/amazon/aws/hooks/redshift_data.py,sha256=JxyXEyFeJHUtMxjjtMlCMJSW9P-cnixISd3R4Ob7fy8,11841
 airflow/providers/amazon/aws/hooks/redshift_sql.py,sha256=gpI1q9KK-mkewigehTegIhWJKrAQnQu1WedDfapx6gU,10947
 airflow/providers/amazon/aws/hooks/s3.py,sha256=sAuzqwpCkWzVl45Vu6juJsb3-T6mcsskaUlPUwGZxSE,67709
-airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=mq-zL8hQHa6SoXArbHzZ31IFylF-P2vhLW310Ggb9Ts,60418
+airflow/providers/amazon/aws/hooks/sagemaker.py,sha256=mZtAOZqBeiIJVJ5gycM16_fJwLxxGMEzsEoe2hwajP4,60524
 airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py,sha256=d3A50iQGOtqvQP9FywkZONr7JU0vMMP2MoqfjoZ_554,7989
 airflow/providers/amazon/aws/hooks/secrets_manager.py,sha256=6srh3jUeSGoqyrSj1M6aSOaA9xT5kna0VGUC0kzH-q0,2690
 airflow/providers/amazon/aws/hooks/ses.py,sha256=DuCJwFhtg3D3mu0RSjRrebyUpwBAhrWSr-kdu8VR9qU,4174
@@ -174,7 +174,7 @@ airflow/providers/amazon/aws/sensors/glue_catalog_partition.py,sha256=YXvkQRnu5T
 airflow/providers/amazon/aws/sensors/glue_crawler.py,sha256=ps-fG09QV_QNs8dxlmFKXNVKhvPIuvJoMJCeYi-6QBc,3375
 airflow/providers/amazon/aws/sensors/kinesis_analytics.py,sha256=TDsQKi5nx10CgMoTSVbYRo4m-PiKFDhyhnO7dQZSnuI,9933
 airflow/providers/amazon/aws/sensors/lambda_function.py,sha256=kO4UyrEMaMYSYbQyBN3F2eoysze_kIYCbMaF4tqiKo0,3287
-airflow/providers/amazon/aws/sensors/mwaa.py,sha256=FK6qPOkV6fZKt4-3ayUyr9O8Q-RZVUphL5a3wIcVHVw,7474
+airflow/providers/amazon/aws/sensors/mwaa.py,sha256=ahWoH1hjBL_FcYwg21G15Z-00Bb0xqZe5evWlm8nRo8,14063
 airflow/providers/amazon/aws/sensors/opensearch_serverless.py,sha256=cSaZvCvAC7zhFqBYNympTiQHtgCZ7srC5-TrbS4l2GQ,5508
 airflow/providers/amazon/aws/sensors/quicksight.py,sha256=lm1omzh01BKh0KHU3g2I1yH9LAXtddUDiuIS3uIeOrE,3575
 airflow/providers/amazon/aws/sensors/rds.py,sha256=HWYQOQ7n9s48Ci2WxBOtrAp17aB-at5werAljq3NDYE,7420
@@ -211,7 +211,7 @@ airflow/providers/amazon/aws/transfers/sql_to_s3.py,sha256=-OdWGu-1P9yHsFUqmFuGT
 airflow/providers/amazon/aws/triggers/README.md,sha256=ax2F0w2CuQSDN4ghJADozrrv5W4OeCDPA8Vzp00BXOU,10919
 airflow/providers/amazon/aws/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/amazon/aws/triggers/athena.py,sha256=62ty40zejcm5Y0d1rTQZuYzSjq3hUkmAs0d_zxM_Kjw,2596
-airflow/providers/amazon/aws/triggers/base.py,sha256=KKGx72M3VnlGOJuwnHALXWOpTkRGcHKQ1yY7GWStnPk,6804
+airflow/providers/amazon/aws/triggers/base.py,sha256=z19widJByDpOflWTOlqMq35O6a4tfpU4fdaXVwWywA4,6820
 airflow/providers/amazon/aws/triggers/batch.py,sha256=GogZnPaSc1ms55_aNcSDtV4wIZL3kMCSCY7dqpGJH4o,4257
 airflow/providers/amazon/aws/triggers/bedrock.py,sha256=9SI7WknPhrnGkFllv0Dk-XAat99cvQP5rNdVRlMS4Cw,10734
 airflow/providers/amazon/aws/triggers/comprehend.py,sha256=atK02t-G6e-Rgd-a-IHc4n-wGZ3oC4pKueOwNeaLCrI,4063
@@ -225,7 +225,7 @@ airflow/providers/amazon/aws/triggers/glue_crawler.py,sha256=W6EYAizQQtVbH5SiZmo
 airflow/providers/amazon/aws/triggers/glue_databrew.py,sha256=SWbsgUrEfPN2Efk3Jfu8mlCAlUJRCHp8PnnqI4YbUR8,2593
 airflow/providers/amazon/aws/triggers/kinesis_analytics.py,sha256=FERA9pE2o4juRJZVlEauDcJcPkhlQ6K9Q6RHt2MZlcE,2937
 airflow/providers/amazon/aws/triggers/lambda_function.py,sha256=CWVJHvUSd1v4THrFOA59XW0AjOqfwTR87rT4tUaYaYQ,2847
-airflow/providers/amazon/aws/triggers/mwaa.py,sha256=X6YgeDhQTZW-hGWbazgAZasl6RLZ-pnwU76vSqjpVRg,5407
+airflow/providers/amazon/aws/triggers/mwaa.py,sha256=n6kx4BF5AqgOSZlGOEakho2v_RrJk18XEdRpmI6x6uA,9407
 airflow/providers/amazon/aws/triggers/neptune.py,sha256=bL9W78zgDp5rASdLGm-_WO6XKne5_tcOMkbGBPmdX-8,5868
 airflow/providers/amazon/aws/triggers/opensearch_serverless.py,sha256=iHaYvCsNQ-C-MppZ2bkaQsvh2Ox284Fka5TKux6XoKM,3217
 airflow/providers/amazon/aws/triggers/rds.py,sha256=jT714qEymUpH3zIiwNbCyFFyJarTgCIlfd4v1l3lZFw,7763
@@ -273,14 +273,14 @@ airflow/providers/amazon/aws/waiters/emr-serverless.json,sha256=Xri0SRcApLOLnwZM
 airflow/providers/amazon/aws/waiters/emr.json,sha256=uBCMdBhxhRv1aOu9sBsyi99KkFw1hOsVjoiIYBZcSsA,5513
 airflow/providers/amazon/aws/waiters/glue.json,sha256=m-dl9wZno20lLIwTzPWFX1Gpzx24PEYIb3RvrmNs8QI,6050
 airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json,sha256=4kjJb4MEQM0CYCsMBK2gacRAh2fvNIAbppaux1VXCdI,5507
-airflow/providers/amazon/aws/waiters/mwaa.json,sha256=42Sm8rJeMxcSk5QVFoamRj09hNXZlBCBPj7mCvuvhLs,814
+airflow/providers/amazon/aws/waiters/mwaa.json,sha256=thUbC4nfguy2bx47Cv623iE_oGvIa539ZYPGTGJSJs4,928
 airflow/providers/amazon/aws/waiters/neptune.json,sha256=4IP0FPqdItVmyP_au9hxpMTx6DJ-vWv_Yf4HEYlnv90,2922
 airflow/providers/amazon/aws/waiters/opensearchserverless.json,sha256=7UkPgv_tBm6MS-WjhxC4CX55pHfcc-T_WdtHrcA5LVg,1115
 airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-yR4CQrlhfK9bAdM4,9511
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=GsOH-emGerKGBAUFmI5lpMfNGH4c0ol_PSiea25DCEY,1033
-apache_airflow_providers_amazon-9.11.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-9.11.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_amazon-9.11.0rc1.dist-info/METADATA,sha256=0pKkjHOqSZ_T4Z3o5JDver1MDZEAPIRgYbcgaOUDxMs,10204
-apache_airflow_providers_amazon-9.11.0rc1.dist-info/RECORD,,
+apache_airflow_providers_amazon-9.12.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-9.12.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_amazon-9.12.0.dist-info/METADATA,sha256=cbCdZmUz3Hm_a0Z8o_4jcnvr7So3NJK-3j7_iUt3S-E,10166
+apache_airflow_providers_amazon-9.12.0.dist-info/RECORD,,