databricks-sdk 0.25.1__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +3 -3
- databricks/sdk/_property.py +42 -0
- databricks/sdk/dbutils.py +18 -20
- databricks/sdk/mixins/files.py +324 -93
- databricks/sdk/service/catalog.py +17 -40
- databricks/sdk/service/compute.py +76 -47
- databricks/sdk/service/jobs.py +26 -4
- databricks/sdk/service/pipelines.py +210 -0
- databricks/sdk/service/serving.py +538 -230
- databricks/sdk/service/settings.py +191 -189
- databricks/sdk/service/sharing.py +2 -0
- databricks/sdk/service/sql.py +47 -8
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.25.1.dist-info → databricks_sdk-0.27.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.25.1.dist-info → databricks_sdk-0.27.0.dist-info}/RECORD +19 -18
- {databricks_sdk-0.25.1.dist-info → databricks_sdk-0.27.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.25.1.dist-info → databricks_sdk-0.27.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.25.1.dist-info → databricks_sdk-0.27.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.25.1.dist-info → databricks_sdk-0.27.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/compute.py
CHANGED

@@ -1637,6 +1637,28 @@ class ClusterSpec:
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
+@dataclass
+class ClusterStatusResponse:
+    cluster_id: Optional[str] = None
+    """Unique identifier for the cluster."""
+
+    library_statuses: Optional[List[LibraryFullStatus]] = None
+    """Status of all libraries on the cluster."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ClusterStatusResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ClusterStatusResponse:
+        """Deserializes the ClusterStatusResponse from a dictionary."""
+        return cls(cluster_id=d.get('cluster_id', None),
+                   library_statuses=_repeated_dict(d, 'library_statuses', LibraryFullStatus))
+
+
 @dataclass
 class Command:
     cluster_id: Optional[str] = None
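The new `ClusterStatusResponse` wraps the body returned by `GET /api/2.0/libraries/cluster-status`. A minimal round-trip sketch, using an illustrative payload (the cluster id and library are placeholders):

from databricks.sdk.service.compute import ClusterStatusResponse, LibraryInstallStatus

# Illustrative response body for GET /api/2.0/libraries/cluster-status.
raw = {
    'cluster_id': '0123-456789-abcdef',
    'library_statuses': [{'library': {'pypi': {'package': 'simplejson'}},
                          'status': 'INSTALLED'}],
}
resp = ClusterStatusResponse.from_dict(raw)
assert resp.library_statuses[0].status is LibraryInstallStatus.INSTALLED
# as_dict() serializes the object back into a JSON-compatible dictionary.
assert resp.as_dict()['cluster_id'] == '0123-456789-abcdef'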
@@ -2988,8 +3010,9 @@ class Environment:
     supported. Next ID: 5"""
 
     client: str
-    """
-
+    """Client version used by the environment. The client is the user-facing environment of the runtime.
+    Each client comes with a specific set of pre-installed libraries. The version is a string,
+    consisting of the major client version."""
 
     dependencies: Optional[List[str]] = None
     """List of pip dependencies, as supported by the version of pip in this environment. Each
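To make the contract concrete, a hedged sketch of constructing an `Environment` under this docstring (the client version string and pin are illustrative):

from databricks.sdk.service.compute import Environment

# client is the major client version string; dependencies use pip syntax.
env = Environment(client='1', dependencies=['simplejson==3.19.2'])
assert env.as_dict() == {'client': '1', 'dependencies': ['simplejson==3.19.2']}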
@@ -4223,7 +4246,14 @@ class InstancePoolGcpAttributes:
     be of a form like "us-west1-a". The provided availability zone must be in the same region as the
     Databricks workspace. For example, "us-west1-a" is not a valid zone id if the Databricks
     workspace resides in the "us-east1" region. This is an optional field at instance pool creation,
-    and if not specified, a default zone will be used.
+    and if not specified, a default zone will be used.
+
+    This field can be one of the following: - "HA" => High availability, spread nodes across
+    availability zones for a Databricks deployment region - A GCP availability zone => Pick one of
+    the available zones for (machine type + region) from
+    https://cloud.google.com/compute/docs/regions-zones (e.g. "us-west1-a").
+
+    If empty, Databricks picks an availability zone to schedule the cluster on."""
 
     def as_dict(self) -> dict:
         """Serializes the InstancePoolGcpAttributes into a dictionary suitable for use as a JSON request body."""
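For example, a pool spec can request zone spreading with the special "HA" value or pin a concrete zone (both values below come from the docstring; the choice is illustrative):

from databricks.sdk.service.compute import InstancePoolGcpAttributes

spread = InstancePoolGcpAttributes(zone_id='HA')          # spread nodes across zones
pinned = InstancePoolGcpAttributes(zone_id='us-west1-a')  # pin to a single zone
assert spread.as_dict()['zone_id'] == 'HA'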
@@ -4453,16 +4483,18 @@ class Library:
     """Specification of a CRAN library to be installed as part of the library"""
 
     egg: Optional[str] = None
-    """URI of the egg to
-
-
-
+    """URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog Volumes
+    paths, and S3 URIs. For example: `{ "egg": "/Workspace/path/to/library.egg" }`, `{ "egg" :
+    "/Volumes/path/to/library.egg" }` or `{ "egg": "s3://my-bucket/library.egg" }`. If S3 is used,
+    please make sure the cluster has read access on the library. You may need to launch the cluster
+    with an IAM role to access the S3 URI."""
 
     jar: Optional[str] = None
-    """URI of the
-
-
-    cluster
+    """URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes
+    paths, and S3 URIs. For example: `{ "jar": "/Workspace/path/to/library.jar" }`, `{ "jar" :
+    "/Volumes/path/to/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If S3 is used,
+    please make sure the cluster has read access on the library. You may need to launch the cluster
+    with an IAM role to access the S3 URI."""
 
     maven: Optional[MavenLibrary] = None
     """Specification of a maven library to be installed. For example: `{ "coordinates":
@@ -4471,10 +4503,17 @@ class Library:
     pypi: Optional[PythonPyPiLibrary] = None
     """Specification of a PyPi library to be installed. For example: `{ "package": "simplejson" }`"""
 
+    requirements: Optional[str] = None
+    """URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes
+    paths are supported. For example: `{ "requirements": "/Workspace/path/to/requirements.txt" }` or
+    `{ "requirements" : "/Volumes/path/to/requirements.txt" }`"""
+
     whl: Optional[str] = None
-    """URI of the wheel to
-
-    library.
+    """URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog
+    Volumes paths, and S3 URIs. For example: `{ "whl": "/Workspace/path/to/library.whl" }`, `{ "whl"
+    : "/Volumes/path/to/library.whl" }` or `{ "whl": "s3://my-bucket/library.whl" }`. If S3 is used,
+    please make sure the cluster has read access on the library. You may need to launch the cluster
+    with an IAM role to access the S3 URI."""
 
     def as_dict(self) -> dict:
         """Serializes the Library into a dictionary suitable for use as a JSON request body."""
@@ -4484,6 +4523,7 @@ class Library:
         if self.jar is not None: body['jar'] = self.jar
         if self.maven: body['maven'] = self.maven.as_dict()
         if self.pypi: body['pypi'] = self.pypi.as_dict()
+        if self.requirements is not None: body['requirements'] = self.requirements
         if self.whl is not None: body['whl'] = self.whl
         return body
 
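A short sketch of the new `requirements` field alongside the existing URI-based fields (the paths are placeholders):

from databricks.sdk.service.compute import Library

reqs = Library(requirements='/Volumes/main/default/libs/requirements.txt')
wheel = Library(whl='/Workspace/Shared/libs/my_lib-0.1-py3-none-any.whl')
# Only the fields that are set appear in the request body.
assert reqs.as_dict() == {'requirements': '/Volumes/main/default/libs/requirements.txt'}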
@@ -4495,11 +4535,14 @@ class Library:
                    jar=d.get('jar', None),
                    maven=_from_dict(d, 'maven', MavenLibrary),
                    pypi=_from_dict(d, 'pypi', PythonPyPiLibrary),
+                   requirements=d.get('requirements', None),
                    whl=d.get('whl', None))
 
 
 @dataclass
 class LibraryFullStatus:
+    """The status of the library on a specific cluster."""
+
     is_library_for_all_clusters: Optional[bool] = None
     """Whether the library was set to be installed on all clusters via the libraries UI."""
 
@@ -4509,7 +4552,7 @@ class LibraryFullStatus:
     messages: Optional[List[str]] = None
     """All the info and warning messages that have occurred so far for this library."""
 
-    status: Optional[
+    status: Optional[LibraryInstallStatus] = None
     """Status of installing the library on the cluster."""
 
     def as_dict(self) -> dict:
@@ -4528,17 +4571,18 @@ class LibraryFullStatus:
         return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None),
                    library=_from_dict(d, 'library', Library),
                    messages=d.get('messages', None),
-                   status=_enum(d, 'status',
+                   status=_enum(d, 'status', LibraryInstallStatus))
 
 
-class
-"""
+class LibraryInstallStatus(Enum):
+    """The status of a library on a specific cluster."""
 
     FAILED = 'FAILED'
     INSTALLED = 'INSTALLED'
     INSTALLING = 'INSTALLING'
     PENDING = 'PENDING'
     RESOLVING = 'RESOLVING'
+    RESTORED = 'RESTORED'
     SKIPPED = 'SKIPPED'
     UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART'
 
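The wire format carries the status as a plain string; `from_dict` maps it onto the enum, including the new `RESTORED` member:

from databricks.sdk.service.compute import LibraryFullStatus, LibraryInstallStatus

full = LibraryFullStatus.from_dict({'status': 'RESTORED'})
assert full.status is LibraryInstallStatus.RESTORED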
@@ -8070,16 +8114,13 @@ class LibrariesAPI:
     cluster.
 
     To make third-party or custom code available to notebooks and jobs running on your clusters, you can
-    install a library. Libraries can be written in Python, Java, Scala, and R. You can upload
-
+    install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java,
+    Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories.
 
     Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library
     directly from a public repository such as PyPI or Maven, using a previously installed workspace library,
     or using an init script.
 
-    When you install a library on a cluster, a notebook already attached to that cluster will not immediately
-    see the new library. You must first detach and then reattach the notebook to the cluster.
-
     When you uninstall a library from a cluster, the library is removed only when you restart the cluster.
     Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart."""
 
@@ -8089,9 +8130,8 @@ class LibrariesAPI:
     def all_cluster_statuses(self) -> ListAllClusterLibraryStatusesResponse:
         """Get all statuses.
 
-        Get the status of all libraries on all clusters. A status
-
-        all clusters via the libraries UI.
+        Get the status of all libraries on all clusters. A status is returned for all libraries installed on
+        this cluster via the API or the libraries UI.
 
        :returns: :class:`ListAllClusterLibraryStatusesResponse`
        """
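A usage sketch, assuming ambient authentication (e.g. DATABRICKS_HOST/DATABRICKS_TOKEN) and that the response's `statuses` field lists one entry per cluster, as in prior releases:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.libraries.all_cluster_statuses()
for cluster in resp.statuses or []:
    # Each entry carries the cluster id and that cluster's library statuses.
    print(cluster.cluster_id, len(cluster.library_statuses or []))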
@@ -8104,18 +8144,11 @@ class LibrariesAPI:
     def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]:
         """Get status.
 
-        Get the status of libraries on a cluster. A status
-
-
-
-
-        order will be order in which the libraries were added to the cluster.
-
-        2. Libraries set to be installed on all clusters are returned next. Within this group there is no
-        order guarantee.
-
-        3. Libraries that were previously requested on this cluster or on all clusters, but now marked for
-        removal. Within this group there is no order guarantee.
+        Get the status of libraries on a cluster. A status is returned for all libraries installed on this
+        cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
+        set to be installed on this cluster, in the order that the libraries were added to the cluster, are
+        returned first. 2. Libraries that were previously requested to be installed on this cluster or on all
+        clusters, but are now marked for removal, in no particular order, are returned last.
 
         :param cluster_id: str
           Unique identifier of the cluster whose status should be retrieved.
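Since installation is asynchronous, callers typically poll this iterator. A sketch (not part of the SDK); treating `RESTORED` as settled is an assumption based on its name:

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import LibraryInstallStatus

IN_FLIGHT = {LibraryInstallStatus.PENDING, LibraryInstallStatus.RESOLVING,
             LibraryInstallStatus.INSTALLING}

def wait_for_libraries(w: WorkspaceClient, cluster_id: str, timeout: float = 600.0):
    """Poll until no library on the cluster is still being installed."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        statuses = list(w.libraries.cluster_status(cluster_id))
        if all(s.status not in IN_FLIGHT for s in statuses):
            return statuses
        time.sleep(10)
    raise TimeoutError(f'libraries on {cluster_id} did not settle in {timeout}s')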
@@ -8128,17 +8161,14 @@ class LibrariesAPI:
         headers = {'Accept': 'application/json', }
 
         json = self._api.do('GET', '/api/2.0/libraries/cluster-status', query=query, headers=headers)
-        parsed =
+        parsed = ClusterStatusResponse.from_dict(json).library_statuses
         return parsed if parsed is not None else []
 
     def install(self, cluster_id: str, libraries: List[Library]):
         """Add a library.
 
-        Add libraries to
-
-
-        **Note**: The actual set of libraries to be installed on a cluster is the union of the libraries
-        specified via this method and the libraries set to be installed on all clusters via the libraries UI.
+        Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
+        after the completion of this request.
 
         :param cluster_id: str
           Unique identifier for the cluster on which to install these libraries.
@@ -8157,9 +8187,8 @@ class LibrariesAPI:
     def uninstall(self, cluster_id: str, libraries: List[Library]):
         """Uninstall libraries.
 
-        Set libraries to
-        restarted.
-        an error.
+        Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
+        restarted. A request to uninstall a library that is not currently installed is ignored.
 
         :param cluster_id: str
           Unique identifier for the cluster on which to uninstall these libraries.
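Putting the two calls together (cluster id and paths are placeholders); note that install returns before installation finishes, and uninstall takes effect only after a restart:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Library, PythonPyPiLibrary

w = WorkspaceClient()
cluster_id = '0123-456789-abcdef'

libs = [Library(pypi=PythonPyPiLibrary(package='simplejson')),
        Library(requirements='/Volumes/main/default/libs/requirements.txt')]
w.libraries.install(cluster_id, libraries=libs)    # asynchronous
w.libraries.uninstall(cluster_id, libraries=libs)  # removed on next restart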
databricks/sdk/service/jobs.py
CHANGED
@@ -1963,12 +1963,20 @@ class NotebookTask:
     `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Notebook is located in
     Databricks workspace. * `GIT`: Notebook is located in cloud Git provider."""
 
+    warehouse_id: Optional[str] = None
+    """Optional `warehouse_id` to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT
+    supported, please use serverless or pro SQL warehouses.
+
+    Note that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run
+    will fail."""
+
     def as_dict(self) -> dict:
         """Serializes the NotebookTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.base_parameters: body['base_parameters'] = self.base_parameters
         if self.notebook_path is not None: body['notebook_path'] = self.notebook_path
         if self.source is not None: body['source'] = self.source.value
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     @classmethod
@@ -1976,7 +1984,8 @@ class NotebookTask:
         """Deserializes the NotebookTask from a dictionary."""
         return cls(base_parameters=d.get('base_parameters', None),
                    notebook_path=d.get('notebook_path', None),
-                   source=_enum(d, 'source', Source)
+                   source=_enum(d, 'source', Source),
+                   warehouse_id=d.get('warehouse_id', None))
 
 
 class PauseStatus(Enum):
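A sketch of a notebook task targeting a SQL warehouse (the path and warehouse id are placeholders); remember that only SQL cells are supported in this mode:

from databricks.sdk.service.jobs import NotebookTask, Source

task = NotebookTask(notebook_path='/Workspace/Users/me@example.com/daily_report',
                    source=Source.WORKSPACE,
                    warehouse_id='abcdef1234567890')
assert task.as_dict()['warehouse_id'] == 'abcdef1234567890'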
@@ -2493,6 +2502,8 @@ class ResolvedValues:
 
 @dataclass
 class Run:
+    """Run was retrieved successfully"""
+
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
     has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
@@ -3953,9 +3964,7 @@ class SqlTask:
     """If dashboard, indicates that this job must refresh a SQL dashboard."""
 
     file: Optional[SqlTaskFile] = None
-    """If file, indicates that this job runs a SQL file in a remote Git repository.
-    statement is supported in a file. Multiple SQL statements separated by semicolons (;) are not
-    permitted."""
+    """If file, indicates that this job runs a SQL file in a remote Git repository."""
 
     parameters: Optional[Dict[str, str]] = None
     """Parameters to be used for each run of this job. The SQL alert task does not support custom
@@ -4175,6 +4184,10 @@ class SubmitRun:
     queue: Optional[QueueSettings] = None
     """The queue settings of the one-time run."""
 
+    run_as: Optional[JobRunAs] = None
+    """Specifies the user or service principal that the job runs as. If not specified, the job runs as
+    the user who submits the request."""
+
     run_job_task: Optional[RunJobTask] = None
     """If run_job_task, indicates that this task must execute another job."""
 
@@ -4231,6 +4244,7 @@ class SubmitRun:
         if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
         if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
         if self.queue: body['queue'] = self.queue.as_dict()
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
         if self.run_name is not None: body['run_name'] = self.run_name
         if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
@@ -4257,6 +4271,7 @@ class SubmitRun:
                    pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
                    python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
                    queue=_from_dict(d, 'queue', QueueSettings),
+                   run_as=_from_dict(d, 'run_as', JobRunAs),
                    run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
                    run_name=d.get('run_name', None),
                    spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
@@ -5890,6 +5905,7 @@ class JobsAPI:
                pipeline_task: Optional[PipelineTask] = None,
                python_wheel_task: Optional[PythonWheelTask] = None,
                queue: Optional[QueueSettings] = None,
+               run_as: Optional[JobRunAs] = None,
                run_job_task: Optional[RunJobTask] = None,
                run_name: Optional[str] = None,
                spark_jar_task: Optional[SparkJarTask] = None,
@@ -5951,6 +5967,9 @@ class JobsAPI:
           If python_wheel_task, indicates that this job must execute a PythonWheel.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the one-time run.
+        :param run_as: :class:`JobRunAs` (optional)
+          Specifies the user or service principal that the job runs as. If not specified, the job runs as the
+          user who submits the request.
         :param run_job_task: :class:`RunJobTask` (optional)
           If run_job_task, indicates that this task must execute another job.
         :param run_name: str (optional)
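A hedged sketch of a one-time run submitted as a service principal (identifiers and paths are placeholders):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
w.jobs.submit(
    run_name='nightly-backfill',
    run_as=jobs.JobRunAs(service_principal_name='00000000-0000-0000-0000-000000000000'),
    tasks=[jobs.SubmitTask(task_key='main',
                           existing_cluster_id='0123-456789-abcdef',
                           notebook_task=jobs.NotebookTask(notebook_path='/Workspace/jobs/backfill'))])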
@@ -6001,6 +6020,7 @@ class JobsAPI:
         if pipeline_task is not None: body['pipeline_task'] = pipeline_task.as_dict()
         if python_wheel_task is not None: body['python_wheel_task'] = python_wheel_task.as_dict()
         if queue is not None: body['queue'] = queue.as_dict()
+        if run_as is not None: body['run_as'] = run_as.as_dict()
         if run_job_task is not None: body['run_job_task'] = run_job_task.as_dict()
         if run_name is not None: body['run_name'] = run_name
         if spark_jar_task is not None: body['spark_jar_task'] = spark_jar_task.as_dict()
@@ -6032,6 +6052,7 @@ class JobsAPI:
                         pipeline_task: Optional[PipelineTask] = None,
                         python_wheel_task: Optional[PythonWheelTask] = None,
                         queue: Optional[QueueSettings] = None,
+                        run_as: Optional[JobRunAs] = None,
                         run_job_task: Optional[RunJobTask] = None,
                         run_name: Optional[str] = None,
                         spark_jar_task: Optional[SparkJarTask] = None,
@@ -6054,6 +6075,7 @@ class JobsAPI:
                            pipeline_task=pipeline_task,
                            python_wheel_task=python_wheel_task,
                            queue=queue,
+                           run_as=run_as,
                            run_job_task=run_job_task,
                            run_name=run_name,
                            spark_jar_task=spark_jar_task,