apache-airflow-providers-amazon 8.27.0rc1__py3-none-any.whl → 8.27.0rc2__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
airflow/providers/amazon/aws/operators/emr.py

@@ -1382,30 +1382,30 @@ class EmrServerlessStartJobOperator(BaseOperator):
 
         self.persist_links(context)
 
-        if self.deferrable:
-            self.defer(
-                trigger=EmrServerlessStartJobTrigger(
-                    application_id=self.application_id,
-                    job_id=self.job_id,
-                    waiter_delay=self.waiter_delay,
-                    waiter_max_attempts=self.waiter_max_attempts,
-                    aws_conn_id=self.aws_conn_id,
-                ),
-                method_name="execute_complete",
-                timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay),
-            )
-
         if self.wait_for_completion:
-            waiter = self.hook.get_waiter("serverless_job_completed")
-            wait(
-                waiter=waiter,
-                waiter_max_attempts=self.waiter_max_attempts,
-                waiter_delay=self.waiter_delay,
-                args={"applicationId": self.application_id, "jobRunId": self.job_id},
-                failure_message="Serverless Job failed",
-                status_message="Serverless Job status is",
-                status_args=["jobRun.state", "jobRun.stateDetails"],
-            )
+            if self.deferrable:
+                self.defer(
+                    trigger=EmrServerlessStartJobTrigger(
+                        application_id=self.application_id,
+                        job_id=self.job_id,
+                        waiter_delay=self.waiter_delay,
+                        waiter_max_attempts=self.waiter_max_attempts,
+                        aws_conn_id=self.aws_conn_id,
+                    ),
+                    method_name="execute_complete",
+                    timeout=timedelta(seconds=self.waiter_max_attempts * self.waiter_delay),
+                )
+            else:
+                waiter = self.hook.get_waiter("serverless_job_completed")
+                wait(
+                    waiter=waiter,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    waiter_delay=self.waiter_delay,
+                    args={"applicationId": self.application_id, "jobRunId": self.job_id},
+                    failure_message="Serverless Job failed",
+                    status_message="Serverless Job status is",
+                    status_args=["jobRun.state", "jobRun.stateDetails"],
+                )
 
         return self.job_id
 
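Note: with this change, EmrServerlessStartJobOperator defers or polls only when wait_for_completion is True; deferrable alone no longer causes the operator to wait. A minimal sketch of the new behavior, assuming a valid EMR Serverless setup (the application id, role ARN, and job driver below are placeholders, not values from this diff):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.amazon.aws.operators.emr import EmrServerlessStartJobOperator

    with DAG(dag_id="emr_serverless_example", start_date=datetime(2024, 1, 1), schedule=None):
        EmrServerlessStartJobOperator(
            task_id="start_job",
            application_id="00fexample123",  # placeholder
            execution_role_arn="arn:aws:iam::123456789012:role/example",  # placeholder
            job_driver={"sparkSubmit": {"entryPoint": "s3://example-bucket/job.py"}},  # placeholder
            wait_for_completion=True,  # without this, deferrable=True no longer waits
            deferrable=True,  # hand polling to EmrServerlessStartJobTrigger on the triggerer
        )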
airflow/providers/amazon/aws/operators/redshift_cluster.py

@@ -32,6 +32,7 @@ from airflow.providers.amazon.aws.triggers.redshift_cluster import (
     RedshiftResumeClusterTrigger,
 )
 from airflow.providers.amazon.aws.utils import validate_execute_complete_event
+from airflow.utils.helpers import prune_dict
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -507,8 +508,8 @@ class RedshiftResumeClusterOperator(BaseOperator):
         aws_conn_id: str | None = "aws_default",
         wait_for_completion: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
-        poll_interval: int = 10,
-        max_attempts: int = 10,
+        poll_interval: int = 30,
+        max_attempts: int = 30,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -542,38 +543,38 @@ class RedshiftResumeClusterOperator(BaseOperator):
                 else:
                     raise error
 
-        if self.deferrable:
-            cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
-            if cluster_state == "available":
-                self.log.info("Resumed cluster successfully")
-            elif cluster_state == "deleting":
-                raise AirflowException(
-                    "Unable to resume cluster since cluster is currently in status: %s", cluster_state
-                )
+        if self.wait_for_completion:
+            if self.deferrable:
+                cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
+                if cluster_state == "available":
+                    self.log.info("Resumed cluster successfully")
+                elif cluster_state == "deleting":
+                    raise AirflowException(
+                        "Unable to resume cluster since cluster is currently in status: %s", cluster_state
+                    )
+                else:
+                    self.defer(
+                        trigger=RedshiftResumeClusterTrigger(
+                            cluster_identifier=self.cluster_identifier,
+                            waiter_delay=self.poll_interval,
+                            waiter_max_attempts=self.max_attempts,
+                            aws_conn_id=self.aws_conn_id,
+                        ),
+                        method_name="execute_complete",
+                        # timeout is set to ensure that if a trigger dies, the timeout does not restart
+                        # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
+                        timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
+                    )
             else:
-                self.defer(
-                    trigger=RedshiftResumeClusterTrigger(
-                        cluster_identifier=self.cluster_identifier,
-                        waiter_delay=self.poll_interval,
-                        waiter_max_attempts=self.max_attempts,
-                        aws_conn_id=self.aws_conn_id,
-                    ),
-                    method_name="execute_complete",
-                    # timeout is set to ensure that if a trigger dies, the timeout does not restart
-                    # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
-                    timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
+                waiter = redshift_hook.get_waiter("cluster_resumed")
+                waiter.wait(
+                    ClusterIdentifier=self.cluster_identifier,
+                    WaiterConfig={
+                        "Delay": self.poll_interval,
+                        "MaxAttempts": self.max_attempts,
+                    },
                 )
 
-        if self.wait_for_completion:
-            waiter = redshift_hook.get_waiter("cluster_resumed")
-            waiter.wait(
-                ClusterIdentifier=self.cluster_identifier,
-                WaiterConfig={
-                    "Delay": self.poll_interval,
-                    "MaxAttempts": self.max_attempts,
-                },
-            )
-
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
         event = validate_execute_complete_event(event)
 
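Note: the same restructuring as in the EMR hunk applies here — both the deferral and the cluster_resumed waiter are now gated behind wait_for_completion, and the polling defaults rise from 10 to 30. A short sketch of the two resulting modes (the cluster identifier is a placeholder):

    from airflow.providers.amazon.aws.operators.redshift_cluster import RedshiftResumeClusterOperator

    # Fire-and-forget: issues the resume call and completes immediately.
    resume = RedshiftResumeClusterOperator(
        task_id="resume_cluster",
        cluster_identifier="example-cluster",  # placeholder
    )

    # Blocking: polls the cluster_resumed waiter, now every 30s for up to 30 attempts by default.
    resume_and_wait = RedshiftResumeClusterOperator(
        task_id="resume_cluster_and_wait",
        cluster_identifier="example-cluster",  # placeholder
        wait_for_completion=True,
    )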
@@ -596,6 +597,7 @@ class RedshiftPauseClusterOperator(BaseOperator):
         running Airflow in a distributed manner and aws_conn_id is None or
         empty, then default boto3 configuration would be used (and must be
         maintained on each worker node).
+    :param wait_for_completion: If True, waits for the cluster to be paused. (default: False)
     :param deferrable: Run operator in the deferrable mode
     :param poll_interval: Time (in seconds) to wait between two consecutive calls to check cluster state
     :param max_attempts: Maximum number of attempts to poll the cluster
@@ -610,14 +612,16 @@ class RedshiftPauseClusterOperator(BaseOperator):
         *,
         cluster_identifier: str,
         aws_conn_id: str | None = "aws_default",
+        wait_for_completion: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
-        poll_interval: int = 10,
-        max_attempts: int = 15,
+        poll_interval: int = 30,
+        max_attempts: int = 30,
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.cluster_identifier = cluster_identifier
         self.aws_conn_id = aws_conn_id
+        self.wait_for_completion = wait_for_completion
         self.deferrable = deferrable
         self.max_attempts = max_attempts
         self.poll_interval = poll_interval
@@ -643,26 +647,38 @@ class RedshiftPauseClusterOperator(BaseOperator):
                     time.sleep(self._attempt_interval)
                 else:
                     raise error
-        if self.deferrable:
-            cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
-            if cluster_state == "paused":
-                self.log.info("Paused cluster successfully")
-            elif cluster_state == "deleting":
-                raise AirflowException(
-                    f"Unable to pause cluster since cluster is currently in status: {cluster_state}"
-                )
+        if self.wait_for_completion:
+            if self.deferrable:
+                cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
+                if cluster_state == "paused":
+                    self.log.info("Paused cluster successfully")
+                elif cluster_state == "deleting":
+                    raise AirflowException(
+                        f"Unable to pause cluster since cluster is currently in status: {cluster_state}"
+                    )
+                else:
+                    self.defer(
+                        trigger=RedshiftPauseClusterTrigger(
+                            cluster_identifier=self.cluster_identifier,
+                            waiter_delay=self.poll_interval,
+                            waiter_max_attempts=self.max_attempts,
+                            aws_conn_id=self.aws_conn_id,
+                        ),
+                        method_name="execute_complete",
+                        # timeout is set to ensure that if a trigger dies, the timeout does not restart
+                        # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
+                        timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
+                    )
             else:
-                self.defer(
-                    trigger=RedshiftPauseClusterTrigger(
-                        cluster_identifier=self.cluster_identifier,
-                        waiter_delay=self.poll_interval,
-                        waiter_max_attempts=self.max_attempts,
-                        aws_conn_id=self.aws_conn_id,
+                waiter = redshift_hook.get_waiter("cluster_paused")
+                waiter.wait(
+                    ClusterIdentifier=self.cluster_identifier,
+                    WaiterConfig=prune_dict(
+                        {
+                            "Delay": self.poll_interval,
+                            "MaxAttempts": self.max_attempts,
+                        }
                     ),
-                    method_name="execute_complete",
-                    # timeout is set to ensure that if a trigger dies, the timeout does not restart
-                    # 60 seconds is added to allow the trigger to exit gracefully (i.e. yield TriggerEvent)
-                    timeout=timedelta(seconds=self.max_attempts * self.poll_interval + 60),
                 )
 
     def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
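Note: unlike the resume path, the pause waiter wraps its WaiterConfig in prune_dict (imported above from airflow.utils.helpers), which in its default strict mode drops None-valued keys so boto3 falls back to the waiter's built-in defaults for anything unset. A quick illustration of that behavior (the values are arbitrary):

    from airflow.utils.helpers import prune_dict

    print(prune_dict({"Delay": 30, "MaxAttempts": None}))   # {'Delay': 30}
    print(prune_dict({"Delay": None, "MaxAttempts": None})) # {}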
airflow/providers/amazon/aws/operators/sagemaker.py

@@ -36,7 +36,6 @@ from airflow.providers.amazon.aws.hooks.sagemaker import (
 )
 from airflow.providers.amazon.aws.triggers.sagemaker import (
     SageMakerPipelineTrigger,
-    SageMakerTrainingPrintLogTrigger,
     SageMakerTrigger,
 )
 from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
@@ -1195,25 +1194,15 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
         if self.max_ingestion_time:
             timeout = datetime.timedelta(seconds=self.max_ingestion_time)
 
-        trigger: SageMakerTrainingPrintLogTrigger | SageMakerTrigger
-        if self.print_log:
-            trigger = SageMakerTrainingPrintLogTrigger(
-                job_name=self.config["TrainingJobName"],
-                poke_interval=self.check_interval,
-                aws_conn_id=self.aws_conn_id,
-            )
-        else:
-            trigger = SageMakerTrigger(
+        self.defer(
+            timeout=timeout,
+            trigger=SageMakerTrigger(
                 job_name=self.config["TrainingJobName"],
                 job_type="Training",
                 poke_interval=self.check_interval,
                 max_attempts=self.max_attempts,
                 aws_conn_id=self.aws_conn_id,
-            )
-
-        self.defer(
-            timeout=timeout,
-            trigger=trigger,
+            ),
             method_name="execute_complete",
         )
 
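Note: after this change the deferrable path always uses SageMakerTrigger; print_log no longer selects SageMakerTrainingPrintLogTrigger. A minimal sketch, assuming a deferrable training task (the config is abbreviated; a real training config also needs AlgorithmSpecification, RoleArn, and the other required keys):

    from airflow.providers.amazon.aws.operators.sagemaker import SageMakerTrainingOperator

    train = SageMakerTrainingOperator(
        task_id="train",
        config={"TrainingJobName": "example-job"},  # abbreviated placeholder config
        deferrable=True,  # now always defers via SageMakerTrigger
        print_log=True,   # no longer changes which trigger class is used
    )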
airflow/providers/amazon/aws/triggers/sagemaker.py

@@ -25,8 +25,9 @@ from functools import cached_property
 from typing import Any, AsyncIterator
 
 from botocore.exceptions import WaiterError
+from deprecated import deprecated
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
 from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
 from airflow.triggers.base import BaseTrigger, TriggerEvent
@@ -199,6 +200,13 @@ class SageMakerPipelineTrigger(BaseTrigger):
             raise AirflowException("Waiter error: max attempts reached")
 
 
+@deprecated(
+    reason=(
+        "`airflow.providers.amazon.aws.triggers.sagemaker.SageMakerTrainingPrintLogTrigger` "
+        "has been deprecated and will be removed in future. Please use ``SageMakerTrigger`` instead."
+    ),
+    category=AirflowProviderDeprecationWarning,
+)
 class SageMakerTrainingPrintLogTrigger(BaseTrigger):
     """
     SageMakerTrainingPrintLogTrigger is fired as deferred class with params to run the task in triggerer.
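Note: the decorator comes from the third-party deprecated package, so instantiating the trigger should now emit an AirflowProviderDeprecationWarning. A sketch of how the warning surfaces (the constructor arguments are placeholders):

    import warnings

    from airflow.exceptions import AirflowProviderDeprecationWarning
    from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrainingPrintLogTrigger

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        SageMakerTrainingPrintLogTrigger(job_name="example-job", poke_interval=30)  # placeholder args

    assert any(issubclass(w.category, AirflowProviderDeprecationWarning) for w in caught)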
apache_airflow_providers_amazon-8.27.0rc2.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-amazon
-Version: 8.27.0rc1
+Version: 8.27.0rc2
 Summary: Provider package apache-airflow-providers-amazon for Apache Airflow
 Keywords: airflow-provider,amazon,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -125,7 +125,7 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-amazon``
 
-Release: ``8.27.0.rc1``
+Release: ``8.27.0.rc2``
 
 
 Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
apache_airflow_providers_amazon-8.27.0rc2.dist-info/RECORD

@@ -114,7 +114,7 @@ airflow/providers/amazon/aws/operators/dms.py,sha256=6RhUtbELAjp0LLkUWl73kdcH4MR
 airflow/providers/amazon/aws/operators/ec2.py,sha256=aQj6cL3nZzu0tcn3dq6RBSPsByZe8fNtn6qcpQYtlNI,17051
 airflow/providers/amazon/aws/operators/ecs.py,sha256=pmm05ugkdyUNwkfY1X-mQdT8pqui0qJumwt92tEaEgU,32601
 airflow/providers/amazon/aws/operators/eks.py,sha256=0xHtPzxZx3ymr6-iqGvAoZsnA24PrjYRSAiFzuxrl3U,50761
-airflow/providers/amazon/aws/operators/emr.py,sha256=tygUmL90ebpita9eKg7sMygHWW1BVomGfc_wnaHjOpE,84804
+airflow/providers/amazon/aws/operators/emr.py,sha256=XNafysiROWBcMxAPK_9MQplcY-itOivKYnljffQo6eE,84909
 airflow/providers/amazon/aws/operators/eventbridge.py,sha256=e686XFhVi54DbaCk7oVc0fhvH6GIPU3p8jgyCie1yBU,10394
 airflow/providers/amazon/aws/operators/glacier.py,sha256=zxwC6lLk6sWerjlogXq6HgNOJx4h0hkqpGpqn23hJWk,3654
 airflow/providers/amazon/aws/operators/glue.py,sha256=m8hdF6eTyzsK3onOqt6Td0dGshhgf_XU1f4EtMb42LU,28390

@@ -125,10 +125,10 @@ airflow/providers/amazon/aws/operators/lambda_function.py,sha256=96KtK5KUpMPW2i8
 airflow/providers/amazon/aws/operators/neptune.py,sha256=on5oNX5K4yHfW1POE0eeZujta71vkJdVL07vucGjX-4,14751
 airflow/providers/amazon/aws/operators/quicksight.py,sha256=jc3Eof19UfLt5IqbQswRzaHaK8h0ACLY99i_1Prtq10,4089
 airflow/providers/amazon/aws/operators/rds.py,sha256=zGiUIwpO2EdUByCYkgFwLbFCQhg_TCTTHVuNnee6X_g,39325
-airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=tYTmkXbITvRMaf2R8m9Cx5f2zYF2w1whNfO-l08wKjY,35755
+airflow/providers/amazon/aws/operators/redshift_cluster.py,sha256=rmBHCssxrYEJ8EnENY-AnzC004lbtHvxXHpy69sHtV0,36681
 airflow/providers/amazon/aws/operators/redshift_data.py,sha256=wK-vTDcn0MqOuF9e-71JYIEkLKihah6oGU-p_8VT2HI,8612
 airflow/providers/amazon/aws/operators/s3.py,sha256=8mzkCBGnAbb5CHf0PFokQwdEmio_D3u99JnNoqTBUW4,35844
-airflow/providers/amazon/aws/operators/sagemaker.py,sha256=MJ-LceYW72Mqtpy5rb_i_oxG9BCUAXrAc7IQOatR85Y,84053
+airflow/providers/amazon/aws/operators/sagemaker.py,sha256=0ZP6vyPHMOVkco80sViyDtwhguawSJFo5l1GLOvunQc,83614
 airflow/providers/amazon/aws/operators/sns.py,sha256=Rttd015UhLo4pCplGybxtLhflyu_26IFzYP7WTmQFk8,3730
 airflow/providers/amazon/aws/operators/sqs.py,sha256=0KkhhIblMggNHLxAyrv5dbWcaXvdSWQA2AOQP2CzOlo,4327
 airflow/providers/amazon/aws/operators/step_function.py,sha256=eXZAxZqG5VNPaFVEchyL4vKmOh54jc83ZjrIZDeld34,9515

@@ -207,7 +207,7 @@ airflow/providers/amazon/aws/triggers/rds.py,sha256=i7FvDkHjg7yr-PQsl2ymn_Lu8B2Z
 airflow/providers/amazon/aws/triggers/redshift_cluster.py,sha256=4VlZgtpfCbe9C2d8IW2qK6QmvFIhs8Guciuq8TZ5GbU,12920
 airflow/providers/amazon/aws/triggers/redshift_data.py,sha256=hRWsyFVt0F6yEMnSb-5pZekUcuWhnpw8tNXzf8Im6cI,4245
 airflow/providers/amazon/aws/triggers/s3.py,sha256=J2MvoiQoQ3GKf8OtFF9awvY6SwVXnhKG_m3ZJ1Z7iwU,9444
-airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=C52lhVsiDEiw-1Kswc0kvNDOyPdDvV5bF7vjbuBN45o,11542
+airflow/providers/amazon/aws/triggers/sagemaker.py,sha256=a8gck8xLEV1vQsp1mu8a6Z08pwidGd500sU2byva5-Q,11890
 airflow/providers/amazon/aws/triggers/sqs.py,sha256=tVA1i8XzV5AqbVQAdWrgrLKoZF8ewqgEwV7ggk1hrQM,8257
 airflow/providers/amazon/aws/triggers/step_function.py,sha256=M1HGdrnxL_T9KSCBNy2t531xMNJaFc-Y792T9cSmLGM,2685
 airflow/providers/amazon/aws/utils/__init__.py,sha256=yUkoHb2LuqSyHvj-HAhc2r2s04Kv_PhdyLMq52KarO8,3878

@@ -249,7 +249,7 @@ airflow/providers/amazon/aws/waiters/rds.json,sha256=HNmNQm5J-VaFHzjWb1pE5P7-Ix-
 airflow/providers/amazon/aws/waiters/redshift.json,sha256=jOBotCgbkko1b_CHcGEbhhRvusgt0YSzVuFiZrqVP30,1742
 airflow/providers/amazon/aws/waiters/sagemaker.json,sha256=JPHuQtUFZ1B7EMLfVmCRevNZ9jgpB71LM0dva8ZEO9A,5254
 airflow/providers/amazon/aws/waiters/stepfunctions.json,sha256=aBaAZaGv8ZZGdN-2gvYEbq3fL_WHI_7s6SSDL-nWS1A,1034
-apache_airflow_providers_amazon-8.27.0rc1.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
-apache_airflow_providers_amazon-8.27.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_amazon-8.27.0rc1.dist-info/METADATA,sha256=ZRHb0Da4c3DYUezf690tX4_PpC6VoOQiWtivY82doB4,10844
-apache_airflow_providers_amazon-8.27.0rc1.dist-info/RECORD,,
+apache_airflow_providers_amazon-8.27.0rc2.dist-info/entry_points.txt,sha256=vlc0ZzhBkMrav1maTRofgksnAw4SwoQLFX9cmnTgktk,102
+apache_airflow_providers_amazon-8.27.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_amazon-8.27.0rc2.dist-info/METADATA,sha256=AZjFZHwmOK2Qdc3jrmyBgvDHz-U1KKlKQ1HCDUbOo2A,10844
+apache_airflow_providers_amazon-8.27.0rc2.dist-info/RECORD,,