databricks-sdk 0.36.0__py3-none-any.whl → 0.38.0__py3-none-any.whl
This diff compares the published contents of the two package versions as they appear in their public registries. It is provided for informational purposes only.
- databricks/sdk/__init__.py +22 -29
- databricks/sdk/_base_client.py +61 -14
- databricks/sdk/config.py +10 -9
- databricks/sdk/credentials_provider.py +6 -5
- databricks/sdk/mixins/jobs.py +49 -0
- databricks/sdk/service/apps.py +50 -186
- databricks/sdk/service/billing.py +1 -1
- databricks/sdk/service/catalog.py +952 -45
- databricks/sdk/service/compute.py +23 -20
- databricks/sdk/service/dashboards.py +31 -281
- databricks/sdk/service/iam.py +6 -4
- databricks/sdk/service/jobs.py +93 -76
- databricks/sdk/service/marketplace.py +1 -0
- databricks/sdk/service/ml.py +4 -3
- databricks/sdk/service/oauth2.py +29 -8
- databricks/sdk/service/pipelines.py +94 -20
- databricks/sdk/service/provisioning.py +68 -0
- databricks/sdk/service/serving.py +2 -2
- databricks/sdk/service/settings.py +322 -2
- databricks/sdk/service/sharing.py +2 -618
- databricks/sdk/service/sql.py +7 -7
- databricks/sdk/service/workspace.py +7 -4
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.36.0.dist-info → databricks_sdk-0.38.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.36.0.dist-info → databricks_sdk-0.38.0.dist-info}/RECORD +29 -28
- {databricks_sdk-0.36.0.dist-info → databricks_sdk-0.38.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.36.0.dist-info → databricks_sdk-0.38.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.36.0.dist-info → databricks_sdk-0.38.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.36.0.dist-info → databricks_sdk-0.38.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/jobs.py
CHANGED
@@ -574,8 +574,7 @@ class CreateJob:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
 
-
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -1752,8 +1751,7 @@ class JobRunAs:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
 
-
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
@@ -1861,8 +1859,7 @@ class JobSettings:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
 
-
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
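Per the reworded docstrings above, exactly one of `user_name` or `service_principal_name` should be set on `run_as`. A minimal sketch of creating a job with an explicit run-as identity; the job name, notebook path, cluster id and application ID below are placeholders, not values from this diff:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Run the job as a service principal; pass user_name=... instead to run as a user.
# Per the docstring, omitting both raises an error server-side.
created = w.jobs.create(
    name='nightly-etl',  # placeholder
    tasks=[
        jobs.Task(task_key='main',
                  notebook_task=jobs.NotebookTask(notebook_path='/Workspace/etl/main'),  # placeholder
                  existing_cluster_id='1234-567890-abcde123')  # placeholder
    ],
    run_as=jobs.JobRunAs(service_principal_name='00000000-0000-0000-0000-000000000000'))  # placeholder app ID
print(created.job_id)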
@@ -2482,8 +2479,9 @@ class RepairRun:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [Task parameter variables]
-
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2916,9 +2914,6 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
-    prev_page_token: Optional[str] = None
-    """A token that can be used to list the previous page of sub-resources."""
-
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
 
@@ -3005,7 +3000,6 @@ class Run:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -3044,7 +3038,6 @@ class Run:
                   number_in_job=d.get('number_in_job', None),
                   original_attempt_run_id=d.get('original_attempt_run_id', None),
                   overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                  prev_page_token=d.get('prev_page_token', None),
                   queue_duration=d.get('queue_duration', None),
                   repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                   run_duration=d.get('run_duration', None),
@@ -3190,8 +3183,9 @@ class RunJobTask:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [Task parameter variables]
-
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used to trigger the job."""
@@ -3350,8 +3344,9 @@ class RunNow:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [Task parameter variables]
-
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -3373,6 +3368,10 @@ class RunNow:
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
+    only: Optional[List[str]] = None
+    """A list of task keys to run inside of the job. If this field is not provided, all tasks in the
+    job will be run."""
+
     pipeline_params: Optional[PipelineParams] = None
     """Controls whether the pipeline should perform a full refresh"""
 
@@ -3427,6 +3426,7 @@ class RunNow:
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.only: body['only'] = [v for v in self.only]
         if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
         if self.python_named_params: body['python_named_params'] = self.python_named_params
         if self.python_params: body['python_params'] = [v for v in self.python_params]
@@ -3444,6 +3444,7 @@ class RunNow:
                   job_id=d.get('job_id', None),
                   job_parameters=d.get('job_parameters', None),
                   notebook_params=d.get('notebook_params', None),
+                  only=d.get('only', None),
                   pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
                   python_named_params=d.get('python_named_params', None),
                   python_params=d.get('python_params', None),
@@ -3563,8 +3564,9 @@ class RunParameters:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [Task parameter variables]
-
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     notebook_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -3774,13 +3776,13 @@ class RunTask:
     once the Jobs service has requested a cluster for the run."""
 
     condition_task: Optional[RunConditionTask] = None
-    """
-
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -3815,7 +3817,8 @@ class RunTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[RunForEachTask] = None
-    """
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
@@ -3837,18 +3840,18 @@ class RunTask:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered more are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
@@ -3868,7 +3871,7 @@ class RunTask:
     :method:jobs/create for a list of possible values."""
 
     run_job_task: Optional[RunJobTask] = None
-    """
+    """The task triggers another job when the `run_job_task` field is present."""
 
     run_page_url: Optional[str] = None
 
@@ -3880,14 +3883,14 @@ class RunTask:
     duration of a multitask job run is the value of the `run_duration` field."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
 
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -3903,7 +3906,8 @@ class RunTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     start_time: Optional[int] = None
     """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC).
@@ -4664,13 +4668,13 @@ class SubmitTask:
     used to reference the tasks to be updated or reset."""
 
     condition_task: Optional[ConditionTask] = None
-    """
-
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4694,7 +4698,8 @@ class SubmitTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[ForEachTask] = None
-    """
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4707,18 +4712,18 @@ class SubmitTask:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered more are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     run_if: Optional[RunIf] = None
     """An optional value indicating the condition that determines whether the task should be run once
@@ -4726,17 +4731,17 @@ class SubmitTask:
     :method:jobs/create for a list of possible values."""
 
     run_job_task: Optional[RunJobTask] = None
-    """
+    """The task triggers another job when the `run_job_task` field is present."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
 
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4752,7 +4757,8 @@ class SubmitTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
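The same task-type docstrings apply to one-time runs built from `SubmitTask`. A short sketch of submitting a one-off notebook run and waiting for it to finish; the run name, notebook path and cluster id are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# submit() returns a waiter; .result() blocks until the run reaches a terminal state.
run = w.jobs.submit(
    run_name='one-off-check',  # placeholder
    tasks=[
        jobs.SubmitTask(task_key='check',
                        notebook_task=jobs.NotebookTask(notebook_path='/Workspace/checks/daily'),  # placeholder
                        existing_cluster_id='1234-567890-abcde123')  # placeholder
    ]).result()
print(run.state)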
@@ -4866,13 +4872,13 @@ class Task:
     used to reference the tasks to be updated or reset."""
 
     condition_task: Optional[ConditionTask] = None
-    """
-
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4900,7 +4906,8 @@ class Task:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[ForEachTask] = None
-    """
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4927,18 +4934,18 @@ class Task:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered more are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     retry_on_timeout: Optional[bool] = None
     """An optional policy to specify whether to retry a job when it times out. The default behavior is
@@ -4954,17 +4961,17 @@ class Task:
     least one dependency failed * `ALL_FAILED`: ALl dependencies have failed"""
 
     run_job_task: Optional[RunJobTask] = None
-    """
+    """The task triggers another job when the `run_job_task` field is present."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
 
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4980,7 +4987,8 @@ class Task:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -5749,8 +5757,7 @@ class JobsAPI:
           Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
          not specified, the job/pipeline runs as the user who created the job/pipeline.
 
-
-          error is thrown.
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
        :param schedule: :class:`CronSchedule` (optional)
          An optional periodic schedule for this job. The default behavior is that the job only runs when
          triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -5922,8 +5929,8 @@ class JobsAPI:
        :param include_resolved_values: bool (optional)
          Whether to include resolved parameter values in the response.
        :param page_token: str (optional)
-          To list the next page
-
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetJob response.
 
        :returns: :class:`Run`
        """
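With `prev_page_token` removed from `Run` (see the earlier hunks), paging through a large run is forward-only. A sketch of collecting all task pages via `get_run`, assuming the returned `Run` still exposes a `next_page_token` field (that field is not shown in this diff):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

run_id = 1234567890  # placeholder
tasks, token = [], None
while True:
    run = w.jobs.get_run(run_id, page_token=token)
    tasks.extend(run.tasks or [])
    token = getattr(run, 'next_page_token', None)  # assumed field; empty means last page
    if not token:
        break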
@@ -6111,8 +6118,9 @@ class JobsAPI:
          in conjunction with notebook_params. The JSON representation of this field (for example
          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-          Use [Task parameter variables]
-
+          Use [Task parameter variables] to set parameters containing information about job runs.
+
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param job_parameters: Dict[str,str] (optional)
          Job-level parameters used in the run. for example `"param": "overriding_val"`
        :param latest_repair_id: int (optional)
@@ -6269,6 +6277,7 @@ class JobsAPI:
                jar_params: Optional[List[str]] = None,
                job_parameters: Optional[Dict[str, str]] = None,
                notebook_params: Optional[Dict[str, str]] = None,
+                only: Optional[List[str]] = None,
                pipeline_params: Optional[PipelineParams] = None,
                python_named_params: Optional[Dict[str, str]] = None,
                python_params: Optional[List[str]] = None,
@@ -6304,8 +6313,9 @@ class JobsAPI:
          in conjunction with notebook_params. The JSON representation of this field (for example
          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-          Use [Task parameter variables]
-
+          Use [Task parameter variables] to set parameters containing information about job runs.
+
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param job_parameters: Dict[str,str] (optional)
          Job-level parameters used in the run. for example `"param": "overriding_val"`
        :param notebook_params: Dict[str,str] (optional)
@@ -6324,6 +6334,9 @@ class JobsAPI:
 
          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param only: List[str] (optional)
+          A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
+          will be run.
        :param pipeline_params: :class:`PipelineParams` (optional)
          Controls whether the pipeline should perform a full refresh
        :param python_named_params: Dict[str,str] (optional)
@@ -6375,6 +6388,7 @@ class JobsAPI:
        if job_id is not None: body['job_id'] = job_id
        if job_parameters is not None: body['job_parameters'] = job_parameters
        if notebook_params is not None: body['notebook_params'] = notebook_params
+        if only is not None: body['only'] = [v for v in only]
        if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
        if python_named_params is not None: body['python_named_params'] = python_named_params
        if python_params is not None: body['python_params'] = [v for v in python_params]
@@ -6396,6 +6410,7 @@ class JobsAPI:
                         jar_params: Optional[List[str]] = None,
                         job_parameters: Optional[Dict[str, str]] = None,
                         notebook_params: Optional[Dict[str, str]] = None,
+                         only: Optional[List[str]] = None,
                         pipeline_params: Optional[PipelineParams] = None,
                         python_named_params: Optional[Dict[str, str]] = None,
                         python_params: Optional[List[str]] = None,
@@ -6409,6 +6424,7 @@ class JobsAPI:
                            job_id=job_id,
                            job_parameters=job_parameters,
                            notebook_params=notebook_params,
+                            only=only,
                            pipeline_params=pipeline_params,
                            python_named_params=python_named_params,
                            python_params=python_params,
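Putting the new parameter together: `only` is accepted by `run_now`/`run_now_and_wait` and serialized into the `RunNow` body as shown above. A sketch that triggers just two named tasks of a job; the job id and task keys are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Only the listed task keys run; the job's other tasks are not started.
run = w.jobs.run_now(job_id=1234, only=['ingest', 'transform']).result()
print(run.state)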
@@ -6423,7 +6439,8 @@ class JobsAPI:
                        access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions:
        """Set job permissions.
 
-        Sets permissions on
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
 
        :param job_id: str
          The job for which to get or manage permissions.
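The clarified wording makes the semantics explicit: `set_permissions` is a full replace, not a merge, and passing no entries clears all direct grants. A sketch for jobs; the job id and user are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# After this call the job's direct permissions are exactly this list.
w.jobs.set_permissions(
    job_id='1234',  # placeholder
    access_control_list=[
        jobs.JobAccessControlRequest(user_name='someone@example.com',  # placeholder
                                     permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN)
    ])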
databricks/sdk/service/ml.py
CHANGED
@@ -4596,7 +4596,8 @@ class ExperimentsAPI:
    ) -> ExperimentPermissions:
        """Set experiment permissions.
 
-        Sets permissions on an
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
 
        :param experiment_id: str
          The experiment for which to get or manage permissions.
@@ -5571,8 +5572,8 @@ class ModelRegistryAPI:
    ) -> RegisteredModelPermissions:
        """Set registered model permissions.
 
-        Sets permissions on
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
 
        :param registered_model_id: str
          The registered model for which to get or manage permissions.
databricks/sdk/service/oauth2.py
CHANGED
@@ -389,19 +389,24 @@ class GetPublishedAppsOutput:
 
 @dataclass
 class ListServicePrincipalSecretsResponse:
+    next_page_token: Optional[str] = None
+    """A token, which can be sent as `page_token` to retrieve the next page."""
+
     secrets: Optional[List[SecretInfo]] = None
     """List of the secrets"""
 
     def as_dict(self) -> dict:
         """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
         """Deserializes the ListServicePrincipalSecretsResponse from a dictionary."""
-        return cls(
+        return cls(next_page_token=d.get('next_page_token', None),
+                   secrets=_repeated_dict(d, 'secrets', SecretInfo))
 
 
 @dataclass
@@ -661,6 +666,7 @@ class CustomAppIntegrationAPI:
        Gets the Custom OAuth App Integration for the given integration id.
 
        :param integration_id: str
+          The OAuth app integration ID.
 
        :returns: :class:`GetCustomAppIntegrationOutput`
        """
@@ -960,7 +966,7 @@ class ServicePrincipalSecretsAPI:
            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}',
            headers=headers)
 
-    def list(self, service_principal_id: int) -> Iterator[SecretInfo]:
+    def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]:
        """List service principal secrets.
 
        List all secrets associated with the given service principal. This operation only returns information
@@ -968,15 +974,30 @@ class ServicePrincipalSecretsAPI:
 
        :param service_principal_id: int
          The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.
 
        :returns: Iterator over :class:`SecretInfo`
        """
 
+        query = {}
+        if page_token is not None: query['page_token'] = page_token
        headers = {'Accept': 'application/json', }
 
-
-
-
-
-
-
+        while True:
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
+                query=query,
+                headers=headers)
+            if 'secrets' in json:
+                for v in json['secrets']:
+                    yield SecretInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
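With the loop above, `list` is now a generator that follows `next_page_token` automatically, so callers can iterate all secrets without handling `page_token` themselves. A sketch against the account-level client; credentials come from the environment and the service principal id is a placeholder:

from databricks.sdk import AccountClient

a = AccountClient()  # host, account_id and credentials resolved from environment/config

for secret in a.service_principal_secrets.list(service_principal_id=1234):
    print(secret.id, secret.status)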