databricks-sdk 0.35.0__py3-none-any.whl → 0.37.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -2482,8 +2482,9 @@ class RepairRun:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2916,9 +2917,6 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""

-    prev_page_token: Optional[str] = None
-    """A token that can be used to list the previous page of sub-resources."""
-
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""

@@ -3005,7 +3003,6 @@ class Run:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -3044,7 +3041,6 @@ class Run:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                   prev_page_token=d.get('prev_page_token', None),
                    queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
@@ -3190,8 +3186,9 @@ class RunJobTask:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used to trigger the job."""
@@ -3350,8 +3347,9 @@ class RunNow:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -3563,8 +3561,9 @@ class RunParameters:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     notebook_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -3774,13 +3773,13 @@ class RunTask:
     once the Jobs service has requested a cluster for the run."""

     condition_task: Optional[RunConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""

     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""

     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -3815,7 +3814,8 @@ class RunTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""

     for_each_task: Optional[RunForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""

     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
@@ -3837,18 +3837,18 @@ class RunTask:
     """If new_cluster, a description of a new cluster that is created for each run."""

     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""

     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""

     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered more are supported."""

     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""

     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
@@ -3868,7 +3868,7 @@ class RunTask:
     :method:jobs/create for a list of possible values."""

     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""

     run_page_url: Optional[str] = None

@@ -3880,14 +3880,14 @@ class RunTask:
     duration of a multitask job run is the value of the `run_duration` field."""

     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""

     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""

     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.

     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -3903,7 +3903,8 @@ class RunTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""

     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""

     start_time: Optional[int] = None
     """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC).
@@ -4664,13 +4665,13 @@ class SubmitTask:
     used to reference the tasks to be updated or reset."""

     condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""

     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""

     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4694,7 +4695,8 @@ class SubmitTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""

     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""

     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4707,18 +4709,18 @@ class SubmitTask:
     """If new_cluster, a description of a new cluster that is created for each run."""

     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""

     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""

     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered more are supported."""

     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""

     run_if: Optional[RunIf] = None
     """An optional value indicating the condition that determines whether the task should be run once
@@ -4726,17 +4728,17 @@ class SubmitTask:
     :method:jobs/create for a list of possible values."""

     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""

     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""

     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""

     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.

     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4752,7 +4754,8 @@ class SubmitTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""

     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""

     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -4866,13 +4869,13 @@ class Task:
     used to reference the tasks to be updated or reset."""

     condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""

     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""

     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4900,7 +4903,8 @@ class Task:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""

     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""

     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4927,18 +4931,18 @@ class Task:
     """If new_cluster, a description of a new cluster that is created for each run."""

     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""

     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task."""

     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered more are supported."""

     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""

     retry_on_timeout: Optional[bool] = None
     """An optional policy to specify whether to retry a job when it times out. The default behavior is
@@ -4954,17 +4958,17 @@ class Task:
     least one dependency failed * `ALL_FAILED`: ALl dependencies have failed"""

     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""

     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""

     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""

     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.

     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4980,7 +4984,8 @@ class Task:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""

     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""

     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -5922,8 +5927,8 @@ class JobsAPI:
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
        :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetJob response.

        :returns: :class:`Run`
        """
@@ -6111,8 +6116,9 @@ class JobsAPI:
          in conjunction with notebook_params. The JSON representation of this field (for example
          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-          Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param job_parameters: Dict[str,str] (optional)
          Job-level parameters used in the run. for example `"param": "overriding_val"`
        :param latest_repair_id: int (optional)
@@ -6304,8 +6310,9 @@ class JobsAPI:
          in conjunction with notebook_params. The JSON representation of this field (for example
          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-          Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param job_parameters: Dict[str,str] (optional)
          Job-level parameters used in the run. for example `"param": "overriding_val"`
        :param notebook_params: Dict[str,str] (optional)
@@ -6423,7 +6430,8 @@ class JobsAPI:
                        access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions:
        """Set job permissions.

-        Sets permissions on a job. Jobs can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.

        :param job_id: str
          The job for which to get or manage permissions.
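The clarified wording is worth taking literally: `set_permissions` replaces the object's direct permissions, and an empty list clears them. A hedged sketch with an assumed job ID and group name:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Replaces (rather than merges) the job's direct permissions with this list.
    w.jobs.set_permissions(
        job_id="1234",
        access_control_list=[
            jobs.JobAccessControlRequest(
                group_name="data-eng",
                permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN)
        ])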
@@ -56,6 +56,7 @@ class AssetType(Enum):
     ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA'
     ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL'
     ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK'
+    ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION'


 @dataclass
@@ -4596,7 +4596,8 @@ class ExperimentsAPI:
                        ) -> ExperimentPermissions:
        """Set experiment permissions.

-        Sets permissions on an experiment. Experiments can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.

        :param experiment_id: str
          The experiment for which to get or manage permissions.
@@ -5571,8 +5572,8 @@ class ModelRegistryAPI:
                        ) -> RegisteredModelPermissions:
        """Set registered model permissions.

-        Sets permissions on a registered model. Registered models can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.

        :param registered_model_id: str
          The registered model for which to get or manage permissions.
@@ -389,19 +389,24 @@ class GetPublishedAppsOutput:

 @dataclass
 class ListServicePrincipalSecretsResponse:
+    next_page_token: Optional[str] = None
+    """A token, which can be sent as `page_token` to retrieve the next page."""
+
     secrets: Optional[List[SecretInfo]] = None
     """List of the secrets"""

     def as_dict(self) -> dict:
        """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
        if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
        return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
        """Deserializes the ListServicePrincipalSecretsResponse from a dictionary."""
-        return cls(secrets=_repeated_dict(d, 'secrets', SecretInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   secrets=_repeated_dict(d, 'secrets', SecretInfo))


 @dataclass
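A quick sketch of the serialization change, assuming the class lives in `databricks.sdk.service.oauth2`: `next_page_token` now survives the `as_dict`/`from_dict` round trip, while an empty `secrets` list is still omitted by `as_dict`:

    from databricks.sdk.service.oauth2 import ListServicePrincipalSecretsResponse

    resp = ListServicePrincipalSecretsResponse.from_dict(
        {'next_page_token': 'abc', 'secrets': []})
    # `if self.secrets:` is falsy for an empty list, so only the token remains.
    assert resp.as_dict() == {'next_page_token': 'abc'}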
@@ -960,7 +965,7 @@ class ServicePrincipalSecretsAPI:
            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}',
            headers=headers)

-    def list(self, service_principal_id: int) -> Iterator[SecretInfo]:
+    def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]:
        """List service principal secrets.

        List all secrets associated with the given service principal. This operation only returns information
@@ -968,15 +973,30 @@ class ServicePrincipalSecretsAPI:

        :param service_principal_id: int
          The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.

        :returns: Iterator over :class:`SecretInfo`
        """

+        query = {}
+        if page_token is not None: query['page_token'] = page_token
        headers = {'Accept': 'application/json', }

-        json = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
-            headers=headers)
-        parsed = ListServicePrincipalSecretsResponse.from_dict(json).secrets
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
+                query=query,
+                headers=headers)
+            if 'secrets' in json:
+                for v in json['secrets']:
+                    yield SecretInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
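Because `list` is now a generator that follows `next_page_token` internally, callers simply iterate and never page by hand. A sketch, assuming account-level auth is configured in the environment and the service principal ID is known:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # The generator keeps requesting pages until no next_page_token is returned.
    for secret in a.service_principal_secrets.list(service_principal_id=123):
        print(secret.id, secret.status)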
@@ -615,6 +615,10 @@ class IngestionConfig:
 @dataclass
 class IngestionGatewayPipelineDefinition:
     connection_id: Optional[str] = None
+    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection this gateway
+    pipeline uses to communicate with the source."""
+
+    connection_name: Optional[str] = None
     """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
     source."""

@@ -633,6 +637,7 @@ class IngestionGatewayPipelineDefinition:
        """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
        if self.gateway_storage_catalog is not None:
            body['gateway_storage_catalog'] = self.gateway_storage_catalog
        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
@@ -644,6 +649,7 @@ class IngestionGatewayPipelineDefinition:
     def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
        """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
        return cls(connection_id=d.get('connection_id', None),
+                   connection_name=d.get('connection_name', None),
                   gateway_storage_catalog=d.get('gateway_storage_catalog', None),
                   gateway_storage_name=d.get('gateway_storage_name', None),
                   gateway_storage_schema=d.get('gateway_storage_schema', None))
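With `connection_id` deprecated, new code would populate `connection_name` instead. A hedged sketch; the connection, catalog, schema, and storage names are assumptions:

    from databricks.sdk.service import pipelines

    gateway = pipelines.IngestionGatewayPipelineDefinition(
        # connection_id is deprecated; reference the UC connection by name.
        connection_name="sqlserver_prod",
        gateway_storage_catalog="main",
        gateway_storage_schema="ingestion_staging",
        gateway_storage_name="gateway_storage",
    )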
@@ -2122,13 +2128,13 @@ class PipelinesAPI:
     def __init__(self, api_client):
        self._api = api_client

-    def wait_get_pipeline_idle(
+    def wait_get_pipeline_running(
            self,
            pipeline_id: str,
            timeout=timedelta(minutes=20),
            callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
        deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.IDLE, )
+        target_states = (PipelineState.RUNNING, )
        failure_states = (PipelineState.FAILED, )
        status_message = 'polling...'
        attempt = 1
@@ -2141,7 +2147,7 @@ class PipelinesAPI:
            if callback:
                callback(poll)
            if status in failure_states:
-                msg = f'failed to reach IDLE, got {status}: {status_message}'
+                msg = f'failed to reach RUNNING, got {status}: {status_message}'
                raise OperationFailed(msg)
            prefix = f"pipeline_id={pipeline_id}"
            sleep = attempt
@@ -2153,13 +2159,13 @@ class PipelinesAPI:
            attempt += 1
        raise TimeoutError(f'timed out after {timeout}: {status_message}')

-    def wait_get_pipeline_running(
+    def wait_get_pipeline_idle(
            self,
            pipeline_id: str,
            timeout=timedelta(minutes=20),
            callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
        deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.RUNNING, )
+        target_states = (PipelineState.IDLE, )
        failure_states = (PipelineState.FAILED, )
        status_message = 'polling...'
        attempt = 1
@@ -2172,7 +2178,7 @@ class PipelinesAPI:
            if callback:
                callback(poll)
            if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f'failed to reach IDLE, got {status}: {status_message}'
                raise OperationFailed(msg)
            prefix = f"pipeline_id={pipeline_id}"
            sleep = attempt
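Both waiters survive this change; only their position in the file (and the matching failure messages) moved. Typical use is unchanged, sketched here with an assumed pipeline ID:

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Polls until the pipeline reaches RUNNING; raises OperationFailed on FAILED
    # and TimeoutError once the deadline passes.
    pipeline = w.pipelines.wait_get_pipeline_running(
        pipeline_id="1234-abcd", timeout=timedelta(minutes=10))
    print(pipeline.state)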
@@ -2518,7 +2524,8 @@ class PipelinesAPI:
                        access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions:
        """Set pipeline permissions.

-        Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.

        :param pipeline_id: str
          The pipeline for which to get or manage permissions.