apache-airflow-providers-amazon 9.2.0rc2__py3-none-any.whl → 9.3.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (62)
  1. airflow/providers/amazon/LICENSE +0 -52
  2. airflow/providers/amazon/__init__.py +1 -1
  3. airflow/providers/amazon/aws/auth_manager/avp/facade.py +1 -4
  4. airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py +90 -106
  5. airflow/providers/amazon/aws/auth_manager/router/login.py +124 -0
  6. airflow/providers/amazon/aws/executors/batch/batch_executor.py +2 -2
  7. airflow/providers/amazon/aws/executors/ecs/boto_schema.py +1 -1
  8. airflow/providers/amazon/aws/executors/ecs/utils.py +2 -1
  9. airflow/providers/amazon/aws/hooks/base_aws.py +6 -1
  10. airflow/providers/amazon/aws/hooks/batch_client.py +1 -2
  11. airflow/providers/amazon/aws/hooks/ecr.py +7 -1
  12. airflow/providers/amazon/aws/hooks/ecs.py +1 -2
  13. airflow/providers/amazon/aws/hooks/eks.py +10 -3
  14. airflow/providers/amazon/aws/hooks/emr.py +20 -0
  15. airflow/providers/amazon/aws/hooks/mwaa.py +85 -0
  16. airflow/providers/amazon/aws/hooks/sqs.py +4 -0
  17. airflow/providers/amazon/aws/hooks/ssm.py +10 -1
  18. airflow/providers/amazon/aws/links/comprehend.py +41 -0
  19. airflow/providers/amazon/aws/links/datasync.py +37 -0
  20. airflow/providers/amazon/aws/links/ec2.py +46 -0
  21. airflow/providers/amazon/aws/links/sagemaker.py +27 -0
  22. airflow/providers/amazon/aws/operators/athena.py +7 -5
  23. airflow/providers/amazon/aws/operators/batch.py +16 -8
  24. airflow/providers/amazon/aws/operators/bedrock.py +20 -18
  25. airflow/providers/amazon/aws/operators/comprehend.py +52 -11
  26. airflow/providers/amazon/aws/operators/datasync.py +40 -2
  27. airflow/providers/amazon/aws/operators/dms.py +0 -4
  28. airflow/providers/amazon/aws/operators/ec2.py +50 -0
  29. airflow/providers/amazon/aws/operators/ecs.py +11 -7
  30. airflow/providers/amazon/aws/operators/eks.py +17 -17
  31. airflow/providers/amazon/aws/operators/emr.py +27 -27
  32. airflow/providers/amazon/aws/operators/glue.py +16 -14
  33. airflow/providers/amazon/aws/operators/glue_crawler.py +3 -3
  34. airflow/providers/amazon/aws/operators/glue_databrew.py +5 -5
  35. airflow/providers/amazon/aws/operators/kinesis_analytics.py +9 -9
  36. airflow/providers/amazon/aws/operators/lambda_function.py +4 -4
  37. airflow/providers/amazon/aws/operators/mwaa.py +109 -0
  38. airflow/providers/amazon/aws/operators/rds.py +16 -16
  39. airflow/providers/amazon/aws/operators/redshift_cluster.py +15 -15
  40. airflow/providers/amazon/aws/operators/redshift_data.py +4 -4
  41. airflow/providers/amazon/aws/operators/sagemaker.py +52 -29
  42. airflow/providers/amazon/aws/operators/sqs.py +6 -0
  43. airflow/providers/amazon/aws/operators/step_function.py +4 -4
  44. airflow/providers/amazon/aws/sensors/ec2.py +3 -3
  45. airflow/providers/amazon/aws/sensors/emr.py +9 -9
  46. airflow/providers/amazon/aws/sensors/glue.py +7 -7
  47. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +3 -3
  48. airflow/providers/amazon/aws/sensors/redshift_cluster.py +3 -3
  49. airflow/providers/amazon/aws/sensors/sqs.py +6 -5
  50. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +8 -3
  51. airflow/providers/amazon/aws/triggers/README.md +1 -1
  52. airflow/providers/amazon/aws/triggers/opensearch_serverless.py +2 -1
  53. airflow/providers/amazon/aws/triggers/sqs.py +2 -1
  54. airflow/providers/amazon/aws/utils/sqs.py +6 -4
  55. airflow/providers/amazon/aws/waiters/dms.json +12 -0
  56. airflow/providers/amazon/get_provider_info.py +106 -87
  57. {apache_airflow_providers_amazon-9.2.0rc2.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/METADATA +18 -36
  58. {apache_airflow_providers_amazon-9.2.0rc2.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/RECORD +61 -55
  59. airflow/providers/amazon/aws/auth_manager/views/auth.py +0 -151
  60. /airflow/providers/amazon/aws/auth_manager/{views → router}/__init__.py +0 -0
  61. {apache_airflow_providers_amazon-9.2.0rc2.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/WHEEL +0 -0
  62. {apache_airflow_providers_amazon-9.2.0rc2.dist-info → apache_airflow_providers_amazon-9.3.0.dist-info}/entry_points.txt +0 -0
@@ -145,13 +145,13 @@ class StepFunctionStartExecutionOperator(AwsBaseOperator[StepFunctionHook]):
         return execution_arn

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Trigger error: event is {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Trigger error: event is {validated_event}")

         self.log.info("State Machine execution completed successfully")
-        return event["execution_arn"]
+        return validated_event["execution_arn"]


 class StepFunctionGetExecutionOutputOperator(AwsBaseOperator[StepFunctionHook]):
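The hunk above is representative of a rename applied throughout this release's deferrable operators and sensors: the dict returned by `validate_execute_complete_event` is bound to a new `validated_event` name instead of being reassigned to the optional `event` parameter, so the validated payload is clearly separated from the raw trigger event. A minimal sketch of the pattern (the `DemoOperator` class below is illustrative only, not part of the provider):

```python
from __future__ import annotations

from typing import Any

from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.utils import validate_execute_complete_event


class DemoOperator:  # illustrative stand-in for one of the deferrable operators above
    def execute_complete(self, context: dict, event: dict[str, Any] | None = None) -> str:
        # The raw trigger payload may be None; the helper returns a validated dict
        # (raising if the payload is unusable), and binding it to a new name keeps
        # the Optional parameter's type intact for static checkers.
        validated_event = validate_execute_complete_event(event)

        if validated_event["status"] != "success":
            raise AirflowException(f"Trigger error: event is {validated_event}")

        return validated_event["execution_arn"]
```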
@@ -95,7 +95,7 @@ class EC2InstanceStateSensor(BaseSensorOperator):
         return instance_state == self.target_state

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error: {validated_event}")
@@ -346,10 +346,10 @@ class EmrContainerSensor(BaseSensorOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error while running job: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error while running job: {validated_event}")

         self.log.info("Job completed.")

@@ -535,10 +535,10 @@ class EmrJobFlowSensor(EmrBaseSensor):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error while running job: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error while running job: {validated_event}")
         self.log.info("Job completed.")


@@ -664,9 +664,9 @@ class EmrStepSensor(EmrBaseSensor):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Error while running job: {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Error while running job: {validated_event}")

         self.log.info("Job %s completed.", self.job_flow_id)
@@ -174,14 +174,14 @@ class GlueDataQualityRuleSetEvaluationRunSensor(AwsBaseSensor[GlueDataQualityHook]):
         super().execute(context=context)

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            message = f"Error: AWS Glue data quality ruleset evaluation run: {event}"
+        if validated_event["status"] != "success":
+            message = f"Error: AWS Glue data quality ruleset evaluation run: {validated_event}"
             raise AirflowException(message)

         self.hook.validate_evaluation_run_results(
-            evaluation_run_id=event["evaluation_run_id"],
+            evaluation_run_id=validated_event["evaluation_run_id"],
             show_results=self.show_results,
             verify_result_status=self.verify_result_status,
         )
@@ -295,10 +295,10 @@ class GlueDataQualityRuleRecommendationRunSensor(AwsBaseSensor[GlueDataQualityHook]):
         super().execute(context=context)

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            message = f"Error: AWS Glue data quality recommendation run: {event}"
+        if validated_event["status"] != "success":
+            message = f"Error: AWS Glue data quality recommendation run: {validated_event}"
             raise AirflowException(message)

         if self.show_results:
@@ -123,8 +123,8 @@ class GlueCatalogPartitionSensor(AwsBaseSensor[GlueCatalogHook]):
         return self.hook.check_for_partition(self.database_name, self.table_name, self.expression)

     def execute_complete(self, context: Context, event: dict | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Trigger error: event is {event}")
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Trigger error: event is {validated_event}")
         self.log.info("Partition exists in the Glue Catalog")
@@ -88,11 +88,11 @@ class RedshiftClusterSensor(BaseSensorOperator):
         )

     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        status = event["status"]
+        status = validated_event["status"]
         if status == "error":
-            raise AirflowException(f"{event['status']}: {event['message']}")
+            raise AirflowException(f"{validated_event['status']}: {validated_event['message']}")
         elif status == "success":
             self.log.info("%s completed successfully.", self.task_id)
             self.log.info("Cluster Identifier %s is in %s state", self.cluster_identifier, self.target_status)
@@ -30,10 +30,11 @@ from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType, process_response
+from airflow.providers.amazon.aws.utils.sqs import process_response

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
+    from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType
     from airflow.utils.context import Context


@@ -156,11 +157,11 @@ class SqsSensor(AwsBaseSensor[SqsHook]):
         super().execute(context=context)

     def execute_complete(self, context: Context, event: dict | None = None) -> None:
-        event = validate_execute_complete_event(event)
+        validated_event = validate_execute_complete_event(event)

-        if event["status"] != "success":
-            raise AirflowException(f"Trigger error: event is {event}")
-        context["ti"].xcom_push(key="messages", value=event["message_batch"])
+        if validated_event["status"] != "success":
+            raise AirflowException(f"Trigger error: event is {validated_event}")
+        context["ti"].xcom_push(key="messages", value=validated_event["message_batch"])

     def poll_sqs(self, sqs_conn: BaseAwsConnection) -> Collection:
         """
@@ -30,7 +30,10 @@ from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryApiHook

 if TYPE_CHECKING:
-    from airflow.models import TaskInstance
+    try:
+        from airflow.sdk.types import RuntimeTaskInstanceProtocol
+    except ImportError:
+        from airflow.models import TaskInstance as RuntimeTaskInstanceProtocol  # type: ignore[assignment]
     from airflow.utils.context import Context


 
@@ -174,7 +177,7 @@ class GoogleApiToS3Operator(BaseOperator):
174
177
  replace=self.s3_overwrite,
175
178
  )
176
179
 
177
- def _update_google_api_endpoint_params_via_xcom(self, task_instance: TaskInstance) -> None:
180
+ def _update_google_api_endpoint_params_via_xcom(self, task_instance: RuntimeTaskInstanceProtocol) -> None:
178
181
  if self.google_api_endpoint_params_via_xcom:
179
182
  google_api_endpoint_params = task_instance.xcom_pull(
180
183
  task_ids=self.google_api_endpoint_params_via_xcom_task_ids,
@@ -182,7 +185,9 @@ class GoogleApiToS3Operator(BaseOperator):
             )
             self.google_api_endpoint_params.update(google_api_endpoint_params)

-    def _expose_google_api_response_via_xcom(self, task_instance: TaskInstance, data: dict) -> None:
+    def _expose_google_api_response_via_xcom(
+        self, task_instance: RuntimeTaskInstanceProtocol, data: dict
+    ) -> None:
         if sys.getsizeof(data) < MAX_XCOM_SIZE:
             task_instance.xcom_push(key=self.google_api_response_via_xcom or XCOM_RETURN_KEY, value=data)
         else:
@@ -126,7 +126,7 @@ Here, we are calling the `get_waiter` function defined in `base_aws.py` which ta
     }
 ```

-For more information about writing custom waiter, see the [README.md](https://github.com/apache/airflow/blob/main/airflow/providers/amazon/aws/waiters/README.md) for custom waiters.
+For more information about writing custom waiter, see the [README.md](https://github.com/apache/airflow/blob/main/providers/amazon/aws/src/airflow/providers/amazon/aws/waiters/README.md) for custom waiters.

 In some cases, a built-in or custom waiter may not be able to solve the problem. In such cases, the asynchronous method used to poll the boto3 API would need to be defined in the hook of the service being used. This method is essentially the same as the synchronous version of the method, except that it will use the aiobotocore client, and will be awaited. For the Redshift example, the async `describe_clusters` method would look as follows:

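The hunk cuts off just before the README's async example. As a rough standalone sketch of what an aiobotocore-based `describe_clusters` poll can look like (the function name and the direct session usage are illustrative assumptions here, not the provider's actual hook code):

```python
from aiobotocore.session import get_session


async def describe_clusters_async(cluster_identifier: str, region_name: str) -> dict:
    """Sketch of an awaitable Redshift poll using the aiobotocore client."""
    session = get_session()
    # create_client returns an async context manager yielding an aiobotocore client.
    async with session.create_client("redshift", region_name=region_name) as client:
        response = await client.describe_clusters(ClusterIdentifier=cluster_identifier)
    # Return the single matching cluster description for the trigger/waiter to inspect.
    return response["Clusters"][0]
```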
@@ -53,7 +53,8 @@ class OpenSearchServerlessCollectionActiveTrigger(AwsBaseWaiterTrigger):
         super().__init__(
             serialized_fields={"collection_id": collection_id, "collection_name": collection_name},
             waiter_name="collection_available",
-            waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]},
+            # waiter_args is a dict[str, Any], allow a possible list of None (it is caught above)
+            waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]},  # type: ignore[list-item]
             failure_message="OpenSearch Serverless Collection creation failed.",
             status_message="Status of OpenSearch Serverless Collection is",
             status_queries=["status"],
@@ -22,11 +22,12 @@ from typing import TYPE_CHECKING, Any

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
-from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType, process_response
+from airflow.providers.amazon.aws.utils.sqs import process_response
 from airflow.triggers.base import BaseTrigger, TriggerEvent

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
+    from airflow.providers.amazon.aws.utils.sqs import MessageFilteringType


 class SqsSensorTrigger(BaseTrigger):
@@ -14,20 +14,22 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+#
 from __future__ import annotations

 import json
 import logging
-from typing import Any
+from typing import TYPE_CHECKING, Any

 import jsonpath_ng
 import jsonpath_ng.ext
-from typing_extensions import Literal

-log = logging.getLogger(__name__)
+if TYPE_CHECKING:
+    from typing import Literal

+    MessageFilteringType = Literal["literal", "jsonpath", "jsonpath-ext"]

-MessageFilteringType = Literal["literal", "jsonpath", "jsonpath-ext"]
+log = logging.getLogger(__name__)


 def process_response(
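The `MessageFilteringType` alias is only used in annotations, so this hunk (and the matching sensor and trigger import changes above) moves it, along with `Literal`, under `TYPE_CHECKING`, dropping the runtime dependency on `typing_extensions`. With `from __future__ import annotations`, annotations are stored as strings and never evaluated at runtime. A small illustrative sketch of the pattern (the names below are made up for the example):

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers; nothing in this block exists at runtime.
    from typing import Literal

    DemoFilteringType = Literal["literal", "jsonpath", "jsonpath-ext"]


def pick_filter(filtering: DemoFilteringType | None = None) -> str:
    # With postponed evaluation (PEP 563) the annotation above stays a string,
    # so DemoFilteringType does not need to be importable when this module runs.
    return filtering or "literal"
```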
@@ -36,6 +36,18 @@
                     "argument": "Replications[0].Status",
                     "expected": "stopped",
                     "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "Replications[0].Status",
+                    "expected": "created",
+                    "state": "success"
+                },
+                {
+                    "matcher": "path",
+                    "argument": "Replications[0].ProvisionData.ProvisionState",
+                    "expected": "deprovisioned",
+                    "state": "success"
                 }
             ]
         },
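The new acceptors above extend a custom waiter definition in the DMS waiter config; as the trigger README earlier in this diff notes, such waiters are obtained through the hook's `get_waiter`. A hypothetical usage sketch (the waiter name `replication_stopped` and the filter values are assumptions for illustration, not taken from this diff):

```python
from airflow.providers.amazon.aws.hooks.dms import DmsHook

# Hypothetical: the waiter name and filter value are placeholders only.
hook = DmsHook(aws_conn_id="aws_default")
waiter = hook.get_waiter("replication_stopped")
waiter.wait(
    Filters=[{"Name": "replication-config-arn", "Values": ["<replication-config-arn>"]}],
    WaiterConfig={"Delay": 60, "MaxAttempts": 60},
)
```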