databricks-sdk 0.64.0__py3-none-any.whl → 0.66.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

All hunks shown below are from databricks/sdk/service/jobs.py.

@@ -873,11 +873,16 @@ class Continuous:
     pause_status: Optional[PauseStatus] = None
     """Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED."""
 
+    task_retry_mode: Optional[TaskRetryMode] = None
+    """Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER."""
+
     def as_dict(self) -> dict:
         """Serializes the Continuous into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.pause_status is not None:
             body["pause_status"] = self.pause_status.value
+        if self.task_retry_mode is not None:
+            body["task_retry_mode"] = self.task_retry_mode.value
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -885,12 +890,17 @@ class Continuous:
         body = {}
         if self.pause_status is not None:
             body["pause_status"] = self.pause_status
+        if self.task_retry_mode is not None:
+            body["task_retry_mode"] = self.task_retry_mode
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Continuous:
         """Deserializes the Continuous from a dictionary."""
-        return cls(pause_status=_enum(d, "pause_status", PauseStatus))
+        return cls(
+            pause_status=_enum(d, "pause_status", PauseStatus),
+            task_retry_mode=_enum(d, "task_retry_mode", TaskRetryMode),
+        )
 
 
 @dataclass
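The new field round-trips through the serialization helpers shown above. A minimal sketch, assuming databricks-sdk >= 0.66.0 (where TaskRetryMode, added later in this diff, is exported from databricks.sdk.service.jobs):

from databricks.sdk.service.jobs import Continuous, PauseStatus, TaskRetryMode

# Build a continuous spec with task-level retries enabled.
c = Continuous(pause_status=PauseStatus.UNPAUSED, task_retry_mode=TaskRetryMode.ON_FAILURE)

body = c.as_dict()
# {'pause_status': 'UNPAUSED', 'task_retry_mode': 'ON_FAILURE'} -- as_dict() stores enum .value strings
restored = Continuous.from_dict(body)
assert restored.task_retry_mode is TaskRetryMode.ON_FAILURE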
@@ -1641,9 +1651,7 @@ class ExportRunOutput:
 
     views: Optional[List[ViewItem]] = None
     """The exported content in HTML format (one for every view item). To extract the HTML notebook from
-    the JSON response, download and run this [Python script].
-
-    [Python script]: https://docs.databricks.com/en/_static/examples/extract.py"""
+    the JSON response, download and run this [Python script](/_static/examples/extract.py)."""
 
     def as_dict(self) -> dict:
         """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body."""
@@ -5649,7 +5657,7 @@ class RunTask:
     clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
     """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
 
-    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+    [clean rooms]: https://docs.databricks.com/clean-rooms/index.html"""
 
     cleanup_duration: Optional[int] = None
     """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts.
@@ -5686,9 +5694,6 @@ class RunTask:
     description: Optional[str] = None
     """An optional description for this task."""
 
-    disabled: Optional[bool] = None
-    """Deprecated, field was never used in production."""
-
     effective_performance_target: Optional[PerformanceTarget] = None
     """The actual performance target used by the serverless run during execution. This can differ from
     the client-set performance target on the request depending on whether the performance mode is
@@ -5800,21 +5805,9 @@ class RunTask:
     """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
-    This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+    """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present.
+    Databricks recommends using the spark_jar_task instead; see [Spark Submit task for
+    jobs](/jobs/spark-submit)."""
 
     sql_task: Optional[SqlTask] = None
     """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
@@ -5863,8 +5856,6 @@ class RunTask:
             body["depends_on"] = [v.as_dict() for v in self.depends_on]
         if self.description is not None:
             body["description"] = self.description
-        if self.disabled is not None:
-            body["disabled"] = self.disabled
         if self.effective_performance_target is not None:
             body["effective_performance_target"] = self.effective_performance_target.value
         if self.email_notifications:
@@ -5962,8 +5953,6 @@ class RunTask:
             body["depends_on"] = self.depends_on
         if self.description is not None:
             body["description"] = self.description
-        if self.disabled is not None:
-            body["disabled"] = self.disabled
         if self.effective_performance_target is not None:
             body["effective_performance_target"] = self.effective_performance_target
         if self.email_notifications:
@@ -6051,7 +6040,6 @@ class RunTask:
             dbt_task=_from_dict(d, "dbt_task", DbtTask),
             depends_on=_repeated_dict(d, "depends_on", TaskDependency),
             description=d.get("description", None),
-            disabled=d.get("disabled", None),
             effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
             email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
             end_time=d.get("end_time", None),
@@ -6906,7 +6894,7 @@ class SubmitTask:
     clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
     """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
 
-    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+    [clean rooms]: https://docs.databricks.com/clean-rooms/index.html"""
 
     condition_task: Optional[ConditionTask] = None
     """The task evaluates a condition that can be used to control the execution of other tasks when the
@@ -6993,21 +6981,9 @@ class SubmitTask:
     """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
-    This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+    """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present.
+    Databricks recommends using the spark_jar_task instead; see [Spark Submit task for
+    jobs](/jobs/spark-submit)."""
 
     sql_task: Optional[SqlTask] = None
     """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
@@ -7397,7 +7373,7 @@ class Task:
     clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
     """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
 
-    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+    [clean rooms]: https://docs.databricks.com/clean-rooms/index.html"""
 
     condition_task: Optional[ConditionTask] = None
     """The task evaluates a condition that can be used to control the execution of other tasks when the
@@ -7428,6 +7404,10 @@ class Task:
     disable_auto_optimization: Optional[bool] = None
     """An option to disable auto optimization in serverless"""
 
+    disabled: Optional[bool] = None
+    """An optional flag to disable the task. If set to true, the task will not run even if it is part
+    of a job."""
+
     email_notifications: Optional[TaskEmailNotifications] = None
     """An optional set of email addresses that is notified when runs of this task begin or complete as
     well as when this task is deleted. The default behavior is to not send any emails."""
@@ -7510,21 +7490,9 @@ class Task:
     """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
-    This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+    """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present.
+    Databricks recommends using the spark_jar_task instead; see [Spark Submit task for
+    jobs](/jobs/spark-submit)."""
 
     sql_task: Optional[SqlTask] = None
     """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
@@ -7558,6 +7526,8 @@ class Task:
             body["description"] = self.description
         if self.disable_auto_optimization is not None:
             body["disable_auto_optimization"] = self.disable_auto_optimization
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
         if self.email_notifications:
             body["email_notifications"] = self.email_notifications.as_dict()
         if self.environment_key is not None:
@@ -7633,6 +7603,8 @@ class Task:
             body["description"] = self.description
         if self.disable_auto_optimization is not None:
             body["disable_auto_optimization"] = self.disable_auto_optimization
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
         if self.email_notifications:
             body["email_notifications"] = self.email_notifications
         if self.environment_key is not None:
@@ -7700,6 +7672,7 @@ class Task:
             depends_on=_repeated_dict(d, "depends_on", TaskDependency),
             description=d.get("description", None),
             disable_auto_optimization=d.get("disable_auto_optimization", None),
+            disabled=d.get("disabled", None),
             email_notifications=_from_dict(d, "email_notifications", TaskEmailNotifications),
             environment_key=d.get("environment_key", None),
             existing_cluster_id=d.get("existing_cluster_id", None),
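In effect, the `disabled` flag moves from RunTask (where it was deprecated and removed above) to Task, where it is now settable. A minimal sketch against databricks-sdk >= 0.66.0 (the task key and notebook path are hypothetical):

from databricks.sdk.service import jobs

task = jobs.Task(
    task_key="optional_step",
    notebook_task=jobs.NotebookTask(notebook_path="/Workspace/Users/me/nb"),  # hypothetical path
    disabled=True,  # per the new docstring, the task will not run even as part of a job
)
assert task.as_dict()["disabled"] is True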
@@ -7891,6 +7864,16 @@ class TaskNotificationSettings:
         )
 
 
+class TaskRetryMode(Enum):
+    """task retry mode of the continuous job * NEVER: The failed task will not be retried. *
+    ON_FAILURE: Retry a failed task if at least one other task in the job is still running its first
+    attempt. When this condition is no longer met or the retry limit is reached, the job run is
+    cancelled and a new run is started."""
+
+    NEVER = "NEVER"
+    ON_FAILURE = "ON_FAILURE"
+
+
 class TerminationCodeCode(Enum):
     """The code indicates why the run was terminated. Additional codes might be introduced in future
     releases. * `SUCCESS`: The run was completed successfully. * `SUCCESS_WITH_FAILURES`: The run
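Tying the two additions together, a continuous job could opt into task-level retries at creation time. A hedged sketch (the job name, notebook path, and cluster ID are hypothetical):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
w.jobs.create(
    name="continuous-with-retries",  # hypothetical job name
    continuous=jobs.Continuous(
        pause_status=jobs.PauseStatus.UNPAUSED,
        task_retry_mode=jobs.TaskRetryMode.ON_FAILURE,
    ),
    tasks=[
        jobs.Task(
            task_key="ingest",
            notebook_task=jobs.NotebookTask(notebook_path="/Workspace/Users/me/ingest"),
            existing_cluster_id="0123-456789-abcdefgh",  # hypothetical cluster ID
        )
    ],
)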