apache-airflow-providers-amazon 8.27.0__py3-none-any.whl → 8.27.0rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
@@ -1382,30 +1382,30 @@ class EmrServerlessStartJobOperator(BaseOperator):
 
         self.persist_links(context)
 
-        if self.wait_for_completion:
-            if self.deferrable:
-                self.defer(
-                    trigger=EmrServerlessStartJobTrigger(
-                        application_id=self.application_id,
-                        job_id=self.job_id,
-                        waiter_delay=self.waiter_delay,
-                        waiter_max_attempts=self.waiter_max_attempts,
-                        aws_conn_id=self.aws_conn_id,
-                    ),
-                    method_name="execute_complete",
-                    timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay),
-                )
-            else:
-                waiter = self.hook.get_waiter("serverless_job_completed")
-                wait(
-                    waiter=waiter,
-                    waiter_max_attempts=self.waiter_max_attempts,
+        if self.deferrable:
+            self.defer(
+                trigger=EmrServerlessStartJobTrigger(
+                    application_id=self.application_id,
+                    job_id=self.job_id,
                     waiter_delay=self.waiter_delay,
-                    args={"applicationId": self.application_id, "jobRunId": self.job_id},
-                    failure_message="Serverless Job failed",
-                    status_message="Serverless Job status is",
-                    status_args=["jobRun.state", "jobRun.stateDetails"],
-                )
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="execute_complete",
+                timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay),
+            )
+
+        if self.wait_for_completion:
+            waiter = self.hook.get_waiter("serverless_job_completed")
+            wait(
+                waiter=waiter,
+                waiter_max_attempts=self.waiter_max_attempts,
+                waiter_delay=self.waiter_delay,
+                args={"applicationId": self.application_id, "jobRunId": self.job_id},
+                failure_message="Serverless Job failed",
+                status_message="Serverless Job status is",
+                status_args=["jobRun.state", "jobRun.stateDetails"],
+            )
 
         return self.job_id
 
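The behavioral difference in this hunk is when the operator defers: on the 8.27.0 side deferral is nested under wait_for_completion, so deferrable=True alone changes nothing, while on the 8.27.0rc1 side the two flags act independently (and self.defer() raises TaskDeferred, so the synchronous wait never runs in the same invocation once the task defers). A condensed control-flow sketch, with hypothetical defer_on_trigger()/block_on_waiter() standing in for the real self.defer(...) and wait(...) calls:

    # 8.27.0: deferral is only possible when wait_for_completion is set
    if wait_for_completion:
        if deferrable:
            defer_on_trigger()   # hypothetical stand-in for self.defer(...)
        else:
            block_on_waiter()    # hypothetical stand-in for wait(...)

    # 8.27.0rc1: deferrable defers unconditionally; wait_for_completion only
    # gates the synchronous waiter (not reached once defer() has raised)
    if deferrable:
        defer_on_trigger()
    if wait_for_completion:
        block_on_waiter()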
@@ -32,7 +32,6 @@ from airflow.providers.amazon.aws.triggers.redshift_cluster import (
     RedshiftResumeClusterTrigger,
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
-from airflow.utils.helpers import prune_dict
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
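The prune_dict import disappears on the 8.27.0rc1 side because, as the RedshiftPauseClusterOperator hunk further down suggests, its only call site in this module (the WaiterConfig=prune_dict({...}) argument of the pause operator's synchronous wait path) is gone in that version.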
@@ -508,8 +507,8 @@ class RedshiftResumeClusterOperator(BaseOperator):
         aws_conn_id: str | None = "aws_default",
         wait_for_completion: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
-        poll_interval: int = 30,
-        max_attempts: int = 30,
+        poll_interval: int = 10,
+        max_attempts: int = 10,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -543,38 +542,38 @@ class RedshiftResumeClusterOperator(BaseOperator):
                 else:
                     raise error
 
-        if self.wait_for_completion:
-            if self.deferrable:
-                cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
-                if cluster_state == "available":
-                    self.log.info("Resumed cluster successfully")
-                elif cluster_state == "deleting":
-                    raise AirflowException(
-                        "Unable to resume cluster since cluster is currently in status: %s", cluster_state
-                    )
-                else:
-                    self.defer(
-                        trigger=RedshiftResumeClusterTrigger(
-                            cluster_identifier=self.cluster_identifier,
-                            waiter_delay=self.poll_interval,
-                            waiter_max_attempts=self.max_attempts,
-                            aws_conn_id=self.aws_conn_id,
-                        ),
-                        method_name="execute_complete",
-                        # timeout is set to ensure that if a trigger dies, the timeout does not restart
-                        # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
-                        timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
-                    )
+        if self.deferrable:
+            cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
+            if cluster_state == "available":
+                self.log.info("Resumed cluster successfully")
+            elif cluster_state == "deleting":
+                raise AirflowException(
+                    "Unable to resume cluster since cluster is currently in status: %s", cluster_state
+                )
             else:
-                waiter = redshift_hook.get_waiter("cluster_resumed")
-                waiter.wait(
-                    ClusterIdentifier=self.cluster_identifier,
-                    WaiterConfig={
-                        "Delay": self.poll_interval,
-                        "MaxAttempts": self.max_attempts,
-                    },
+                self.defer(
+                    trigger=RedshiftResumeClusterTrigger(
+                        cluster_identifier=self.cluster_identifier,
+                        waiter_delay=self.poll_interval,
+                        waiter_max_attempts=self.max_attempts,
+                        aws_conn_id=self.aws_conn_id,
+                    ),
+                    method_name="execute_complete",
+                    # timeout is set to ensure that if a trigger dies, the timeout does not restart
+                    # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
+                    timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
                 )
 
+        if self.wait_for_completion:
+            waiter = redshift_hook.get_waiter("cluster_resumed")
+            waiter.wait(
+                ClusterIdentifier=self.cluster_identifier,
+                WaiterConfig={
+                    "Delay": self.poll_interval,
+                    "MaxAttempts": self.max_attempts,
+                },
+            )
+
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
         event = validate_execute_complete_event(event)
 
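Two things change for RedshiftResumeClusterOperator between these versions. First, the default polling budget differs: 8.27.0 ships poll_interval=30/max_attempts=30 (up to 30 * 30 = 900 s of polling, and a deferral timeout of 30 * 30 + 60 = 960 s), while 8.27.0rc1 ships 10/10 (100 s of polling, 160 s deferral timeout). Second, as in the EMR hunk above, rc1 checks deferrable and wait_for_completion independently. A hypothetical usage sketch that pins the longer 8.27.0-style budget explicitly rather than relying on whichever defaults are installed:

    from airflow.providers.amazon.aws.operators.redshift_cluster import (
        RedshiftResumeClusterOperator,
    )

    resume_cluster = RedshiftResumeClusterOperator(
        task_id="resume_cluster",
        cluster_identifier="my-redshift-cluster",  # hypothetical cluster name
        wait_for_completion=True,
        poll_interval=30,  # seconds between status checks
        max_attempts=30,   # up to 30 checks before the waiter gives up
    )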
@@ -597,7 +596,6 @@ class RedshiftPauseClusterOperator(BaseOperator):
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
-    :param wait_for_completion: If True, waits for the cluster to be paused. (default: False)
     :param deferrable: Run operator in the deferrable mode
     :param poll_interval: Time (in seconds) to wait between two consecutive calls to check cluster state
     :param max_attempts: Maximum number of attempts to poll the cluster
@@ -612,16 +610,14 @@
         *,
         cluster_identifier: str,
         aws_conn_id: str | None = "aws_default",
-        wait_for_completion: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
-        poll_interval: int = 30,
-        max_attempts: int = 30,
+        poll_interval: int = 10,
+        max_attempts: int = 15,
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.cluster_identifier = cluster_identifier
         self.aws_conn_id = aws_conn_id
-        self.wait_for_completion = wait_for_completion
         self.deferrable = deferrable
         self.max_attempts = max_attempts
         self.poll_interval = poll_interval
@@ -647,38 +643,26 @@ class RedshiftPauseClusterOperator(BaseOperator):
                     time.sleep(self._attempt_interval)
                 else:
                     raise error
-        if self.wait_for_completion:
-            if self.deferrable:
-                cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
-                if cluster_state == "paused":
-                    self.log.info("Paused cluster successfully")
-                elif cluster_state == "deleting":
-                    raise AirflowException(
-                        f"Unable to pause cluster since cluster is currently in status: {cluster_state}"
-                    )
-                else:
-                    self.defer(
-                        trigger=RedshiftPauseClusterTrigger(
-                            cluster_identifier=self.cluster_identifier,
-                            waiter_delay=self.poll_interval,
-                            waiter_max_attempts=self.max_attempts,
-                            aws_conn_id=self.aws_conn_id,
-                        ),
-                        method_name="execute_complete",
-                        # timeout is set to ensure that if a trigger dies, the timeout does not restart
-                        # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
-                        timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
-                    )
+        if self.deferrable:
+            cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
+            if cluster_state == "paused":
+                self.log.info("Paused cluster successfully")
+            elif cluster_state == "deleting":
+                raise AirflowException(
+                    f"Unable to pause cluster since cluster is currently in status: {cluster_state}"
+                )
             else:
-                waiter = redshift_hook.get_waiter("cluster_paused")
-                waiter.wait(
-                    ClusterIdentifier=self.cluster_identifier,
-                    WaiterConfig=prune_dict(
-                        {
-                            "Delay": self.poll_interval,
-                            "MaxAttempts": self.max_attempts,
-                        }
+                self.defer(
+                    trigger=RedshiftPauseClusterTrigger(
+                        cluster_identifier=self.cluster_identifier,
+                        waiter_delay=self.poll_interval,
+                        waiter_max_attempts=self.max_attempts,
+                        aws_conn_id=self.aws_conn_id,
                     ),
+                    method_name="execute_complete",
+                    # timeout is set to ensure that if a trigger dies, the timeout does not restart
+                    # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
+                    timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
                 )
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
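On the 8.27.0rc1 side, RedshiftPauseClusterOperator loses wait_for_completion entirely (docstring, constructor argument, attribute, and the synchronous waiter branch above), so only deferrable mode waits for the pause to finish; 8.27.0 restores the parameter. A hypothetical sketch of blocking manually on the rc1 side, reusing the same hook waiter the removed branch called:

    from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook

    hook = RedshiftHook(aws_conn_id="aws_default")
    waiter = hook.get_waiter("cluster_paused")
    waiter.wait(
        ClusterIdentifier="my-redshift-cluster",  # hypothetical cluster name
        WaiterConfig={"Delay": 10, "MaxAttempts": 15},  # mirrors the rc1 defaults
    )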
@@ -36,6 +36,7 @@ from airflow.providers.amazon.aws.hooks.sagemaker import (
 )
 from airflow.providers.amazon.aws.triggers.sagemaker import (
     SageMakerPipelineTrigger,
+    SageMakerTrainingPrintLogTrigger,
     SageMakerTrigger,
 )
 from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
@@ -1194,15 +1195,25 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
             if self.max_ingestion_time:
                 timeout = datetime.timedelta(seconds=self.max_ingestion_time)
 
-            self.defer(
-                timeout=timeout,
-                trigger=SageMakerTrigger(
+            trigger: SageMakerTrainingPrintLogTrigger | SageMakerTrigger
+            if self.print_log:
+                trigger = SageMakerTrainingPrintLogTrigger(
+                    job_name=self.config["TrainingJobName"],
+                    poke_interval=self.check_interval,
+                    aws_conn_id=self.aws_conn_id,
+                )
+            else:
+                trigger = SageMakerTrigger(
                     job_name=self.config["TrainingJobName"],
                     job_type="Training",
                     poke_interval=self.check_interval,
                     max_attempts=self.max_attempts,
                     aws_conn_id=self.aws_conn_id,
-                ),
+                )
+
+            self.defer(
+                timeout=timeout,
+                trigger=trigger,
                 method_name="execute_complete",
             )
 
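This hunk is the operator half of the SageMakerTrainingPrintLogTrigger change: 8.27.0rc1 picks the trigger based on print_log, so a deferred training task keeps printing job logs from the triggerer, while 8.27.0 always defers on the plain SageMakerTrigger. A hypothetical usage sketch for the rc1 behavior (training_config is an assumed dict containing at least "TrainingJobName"):

    from airflow.providers.amazon.aws.operators.sagemaker import SageMakerTrainingOperator

    train_model = SageMakerTrainingOperator(
        task_id="train_model",
        config=training_config,  # assumed SageMaker training config dict
        deferrable=True,
        print_log=True,      # selects SageMakerTrainingPrintLogTrigger on rc1
        check_interval=30,   # becomes the trigger's poke_interval
    )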
@@ -25,9 +25,8 @@ from functools import cached_property
 from typing import Any, AsyncIterator
 
 from botocore.exceptions import WaiterError
-from deprecated import deprecated
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
 from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
 from airflow.triggers.base import BaseTrigger, TriggerEvent
@@ -200,13 +199,6 @@ class SageMakerPipelineTrigger(BaseTrigger):
         raise AirflowException("Waiter error: max attempts reached")
 
 
-@deprecated(
-    reason=(
-        "`airflow.providers.amazon.aws.triggers.sagemaker.SageMakerTrainingPrintLogTrigger` "
-        "has been deprecated and will be removed in future. Please use ``SageMakerTrigger`` instead."
-    ),
-    category=AirflowProviderDeprecationWarning,
-)
 class SageMakerTrainingPrintLogTrigger(BaseTrigger):
     """
     SageMakerTrainingPrintLogTrigger is fired as deferred class with params to run the task in triggerer.
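This removal pairs with the operator hunk above: on the 8.27.0rc1 side SageMakerTrainingPrintLogTrigger is again consumed by SageMakerTrainingOperator, so the deprecation decorator and the now-unused deprecated/AirflowProviderDeprecationWarning imports are dropped; on the 8.27.0 side the trigger is deprecated in favor of SageMakerTrigger.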
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.27.0
+Version: 8.27.0rc1
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,11 +22,11 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyAthena>=3.0.10
-Requires-Dist: apache-airflow-providers-common-compat>=1.1.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.1.0
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
+Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
+Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow>=2.7.0
+Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: boto3>=1.34.90
 Requires-Dist: botocore>=1.34.90
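The rc build rewrites the minimum pins to rc0 suffixes so that release candidates of the other Airflow packages can satisfy them: under PEP 440, a specifier that itself names a pre-release admits pre-release versions, while a plain >= pin excludes them by default. An illustrative sketch using the packaging library (not part of this wheel):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    rc = Version("1.1.0rc1")
    print(rc in SpecifierSet(">=1.1.0rc0"))  # True  - specifier names a pre-release
    print(rc in SpecifierSet(">=1.1.0"))     # False - pre-releases excluded by default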
@@ -38,7 +38,7 @@ Requires-Dist: sqlalchemy_redshift>=0.8.6
 Requires-Dist: watchtower>=3.0.0,<4
 Requires-Dist: aiobotocore[boto3]>=2.13.0 ; extra == "aiobotocore"
 Requires-Dist: apache-airflow-providers-apache-hive ; extra == "apache.hive"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
 Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-exasol ; extra == "exasol"
@@ -125,7 +125,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.27.0``
+Release: ``8.27.0.rc1``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
@@ -114,7 +114,7 @@ airflow/providers/amazon/aws/operators/dms.py,sha256=6RhUtbELAjp0LLkUWl73kdcH4MR
 airflow/providers/amazon/aws/operators/ec2.py,sha256=aQj6cL3nZzu0tcn3dq6RBSPsByZe8fNtn6qcpQYtlNI,17051
 airflow/providers/amazon/aws/operators/ecs.py,sha256=pmm05ugkdyUNwkfY1X-mQdT8pqui0qJumwt92tEaEgU,32601
 airflow/providers/amazon/aws/operators/eks.py,sha256=0xHtPzxZx3ymr6-iqGvAoZsnA24PrjYRSAiFzuxrl3U,50761
-airflow/providers/amazon/aws/operators/emr.py,sha256=XNafysiROWBcMxAPK_9MQplcY-itOivKYnljffQo6eE,84909
+airflow/providers/amazon/aws/operators/emr.py,sha256=tygUmL90ebpita9eKg7sMygHWW1BVomGfc_wnaHjOpE,84804
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=e686XFhVi54DbaCk7oVc0fhvH6GIPU3p8jgyCie1yBU,10394
 airflow/providers/amazon/aws/operators/glacier.py,sha256=zxwC6lLk6sWerjlogXq6HgNOJx4h0hkqpGpqn23hJWk,3654
 airflow/providers/amazon/aws/operators/glue.py,sha256=m8hdF6eTyzsK3onOqt6Td0dGshhgf_XU1f4EtMb42LU,28390
@@ -125,10 +125,10 @@ airflow/providers/amazon/aws/operators/lambda_function.py,sha256=96KtK5KUpMPW2i8
 airflow/providers/amazon/aws/operators/neptune.py,sha256=on5oNX5K4yHfW1POE0eeZujta71vkJdVL07vucGjX-4,14751
 airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswRzaHaK8h0ACLY99i_1Prtq10,4089
 airflow/providers/amazon/aws/operators/rds.py,sha256=zGiUIwpO2EdUByCYkgFwLbFCQhg_TCTTHVuNnee6X_g,39325
-airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=rmBHCssxrYEJ8EnENY-AnzC004lbtHvxXHpy69sHtV0,36681
+airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=tYTmkXbITvRMaf2R8m9Cx5f2zYF2w1whNfO-l08wKjY,35755
 airflow/providers/amazon/aws/operators/redshift_data.py,sha256=wK-vTDcn0MqOuF9e-71JYIEkLKihah6oGU-p_8VT2HI,8612
 airflow/providers/amazon/aws/operators/s3.py,sha256=8mzkCBGnAbb5CHf0PFokQwdEmio_D3u99JnNoqTBUW4,35844
-airflow/providers/amazon/aws/operators/sagemaker.py,sha256=0ZP6vyPHMOVkco80sViyDtwhguawSJFo5l1GLOvunQc,83614
+airflow/providers/amazon/aws/operators/sagemaker.py,sha256=MJ-LceYW72Mqtpy5rb_i_oxG9BCUAXrAc7IQOatR85Y,84053
 airflow/providers/amazon/aws/operators/sns.py,sha256=Rttd015UhLo4pCplGybxtLhflyu_26IFzYP7WTmQFk8,3730
 airflow/providers/amazon/aws/operators/sqs.py,sha256=0KkhhIblMggNHLxAyrv5dbWcaXvdSWQA2AOQP2CzOlo,4327
 airflow/providers/amazon/aws/operators/step_function.py,sha256=eXZAxZqG5VNPaFVEchyL4vKmOh54jc83ZjrIZDeld34,9515
@@ -207,7 +207,7 @@ airflow/providers/amazon/aws/triggers/rds.py,sha256=i7FvDkHjg7yr-PQsl2ymn_Lu8B2Z
 airflow/providers/amazon/aws/triggers/redshift_cluster.py,sha256=4VlZgtpfCbe9C2d8IW2qK6QmvFIhs8Guciuq8TZ5GbU,12920
 airflow/providers/amazon/aws/triggers/redshift_data.py,sha256=hRWsyFVt0F6yEMnSb-5pZekUcuWhnpw8tNXzf8Im6cI,4245
 airflow/providers/amazon/aws/triggers/s3.py,sha256=J2MvoiQoQ3GKf8OtFF9awvY6SwVXnhKG_m3ZJ1Z7iwU,9444
-airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=a8gck8xLEV1vQsp1mu8a6Z08pwidGd500sU2byva5-Q,11890
+airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=C52lhVsiDEiw-1Kswc0kvNDOyPdDvV5bF7vjbuBN45o,11542
 airflow/providers/amazon/aws/triggers/sqs.py,sha256=tVA1i8XzV5AqbVQAdWrgrLKoZF8ewqgEwV7ggk1hrQM,8257
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
 airflow/providers/amazon/aws/utils/__init__.py,sha256=yUkoHb2LuqSyHvj-HAhc2r2s04Kv_PhdyLMq52KarO8,3878
@@ -249,7 +249,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.27.0.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-8.27.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_amazon-8.27.0.dist-info/METADATA,sha256=5X4DNhW0DOdh4MsTBW_fObTzfM2_vGxW1Cm3f-Yfcgw,10822
-apache_airflow_providers_amazon-8.27.0.dist-info/RECORD,,
+apache_airflow_providers_amazon-8.27.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.27.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.27.0rc1.dist-info/METADATA,sha256=ZRHb0Da4c3DYUezf690tX4_PpC6VoOQiWtivY82doB4,10844
+apache_airflow_providers_amazon-8.27.0rc1.dist-info/RECORD,,