databricks-sdk 0.27.1__py3-none-any.whl → 0.29.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- databricks/sdk/__init__.py +16 -12
- databricks/sdk/azure.py +0 -27
- databricks/sdk/config.py +71 -19
- databricks/sdk/core.py +27 -0
- databricks/sdk/credentials_provider.py +121 -44
- databricks/sdk/dbutils.py +81 -3
- databricks/sdk/environments.py +34 -1
- databricks/sdk/errors/__init__.py +1 -0
- databricks/sdk/errors/mapper.py +4 -0
- databricks/sdk/errors/private_link.py +60 -0
- databricks/sdk/oauth.py +8 -6
- databricks/sdk/service/catalog.py +774 -632
- databricks/sdk/service/compute.py +91 -116
- databricks/sdk/service/dashboards.py +707 -2
- databricks/sdk/service/jobs.py +126 -163
- databricks/sdk/service/marketplace.py +145 -31
- databricks/sdk/service/oauth2.py +22 -0
- databricks/sdk/service/pipelines.py +119 -4
- databricks/sdk/service/serving.py +217 -64
- databricks/sdk/service/settings.py +1 -0
- databricks/sdk/service/sharing.py +36 -2
- databricks/sdk/service/sql.py +103 -24
- databricks/sdk/service/vectorsearch.py +263 -1
- databricks/sdk/service/workspace.py +8 -4
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/METADATA +2 -1
- databricks_sdk-0.29.0.dist-info/RECORD +57 -0
- databricks_sdk-0.27.1.dist-info/RECORD +0 -56
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/jobs.py
CHANGED
@@ -940,17 +940,23 @@ class ForEachTaskErrorMessageStats:
     error_message: Optional[str] = None
     """Describes the error message occured during the iterations."""
 
+    termination_category: Optional[str] = None
+    """Describes the termination reason for the error message."""
+
     def as_dict(self) -> dict:
         """Serializes the ForEachTaskErrorMessageStats into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.count is not None: body['count'] = self.count
         if self.error_message is not None: body['error_message'] = self.error_message
+        if self.termination_category is not None: body['termination_category'] = self.termination_category
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats:
         """Deserializes the ForEachTaskErrorMessageStats from a dictionary."""
-        return cls(count=d.get('count', None), error_message=d.get('error_message', None))
+        return cls(count=d.get('count', None),
+                   error_message=d.get('error_message', None),
+                   termination_category=d.get('termination_category', None))
 
 
 @dataclass
@@ -1315,6 +1321,13 @@ class JobEmailNotifications:
     """A list of email addresses to be notified when a run begins. If not specified on job creation,
     reset, or update, the list is empty, and notifications are not sent."""
 
+    on_streaming_backlog_exceeded: Optional[List[str]] = None
+    """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any
+    stream. Streaming backlog thresholds can be set in the `health` field using the following
+    metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or
+    `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the
+    issue persists, notifications are resent every 30 minutes."""
+
     on_success: Optional[List[str]] = None
     """A list of email addresses to be notified when a run successfully completes. A run is considered
     to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS`
@@ -1332,6 +1345,8 @@ class JobEmailNotifications:
             ]
         if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
         if self.on_start: body['on_start'] = [v for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
@@ -1343,6 +1358,7 @@ class JobEmailNotifications:
                    None),
                    on_failure=d.get('on_failure', None),
                    on_start=d.get('on_start', None),
+                   on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None),
                    on_success=d.get('on_success', None))
 
 
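The new `on_streaming_backlog_exceeded` list mirrors the existing `on_start`/`on_success`/`on_failure` fields. A minimal sketch of how it might be used once this version is installed; the addresses are placeholders, and the backlog thresholds themselves are configured separately through the job's `health` rules:

from databricks.sdk.service import jobs

# Hypothetical recipients; thresholds are defined in the job's `health` field.
notifications = jobs.JobEmailNotifications(
    on_failure=['oncall@example.com'],
    on_streaming_backlog_exceeded=['data-eng@example.com'])

notifications.as_dict()
# {'on_failure': ['oncall@example.com'],
#  'on_streaming_backlog_exceeded': ['data-eng@example.com']}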
@@ -1352,9 +1368,8 @@ class JobEnvironment:
     """The key of an environment. It has to be unique within a job."""
 
     spec: Optional[compute.Environment] = None
-    """The
-
-    supported. Next ID: 5"""
+    """The environment entity used to preserve serverless environment side panel and jobs' environment
+    for non-notebook task. In this minimal environment spec, only pip dependencies are supported."""
 
     def as_dict(self) -> dict:
         """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body."""
@@ -1783,9 +1798,21 @@ class JobSourceDirtyState(Enum):
 
 
 class JobsHealthMetric(Enum):
-    """Specifies the health metric that is being evaluated for a particular health rule."""
+    """Specifies the health metric that is being evaluated for a particular health rule.
+
+    * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
+    An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
+    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
+    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
+    streams. This metric is in Private Preview."""
 
     RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS'
+    STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES'
+    STREAMING_BACKLOG_FILES = 'STREAMING_BACKLOG_FILES'
+    STREAMING_BACKLOG_RECORDS = 'STREAMING_BACKLOG_RECORDS'
+    STREAMING_BACKLOG_SECONDS = 'STREAMING_BACKLOG_SECONDS'
 
 
 class JobsHealthOperator(Enum):
@@ -1797,7 +1824,15 @@ class JobsHealthOperator(Enum):
 @dataclass
 class JobsHealthRule:
     metric: JobsHealthMetric
-    """Specifies the health metric that is being evaluated for a particular health rule."""
+    """Specifies the health metric that is being evaluated for a particular health rule.
+
+    * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
+    An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
+    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
+    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
+    streams. This metric is in Private Preview."""
 
     op: JobsHealthOperator
     """Specifies the operator used to compare the health metric value with the specified threshold."""
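A sketch of a health rule built on one of the new streaming backlog metrics, assuming the existing `JobsHealthRules` and `JobsHealthOperator` types; the metric choice and threshold value are illustrative:

from databricks.sdk.service import jobs

# Assumed example: alert when the estimated consumer delay across all streams
# exceeds 600 seconds (10 minutes).
health = jobs.JobsHealthRules(rules=[
    jobs.JobsHealthRule(metric=jobs.JobsHealthMetric.STREAMING_BACKLOG_SECONDS,
                        op=jobs.JobsHealthOperator.GREATER_THAN,
                        value=600)
])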
@@ -1994,6 +2029,36 @@ class PauseStatus(Enum):
     UNPAUSED = 'UNPAUSED'
 
 
+@dataclass
+class PeriodicTriggerConfiguration:
+    interval: int
+    """The interval at which the trigger should run."""
+
+    unit: PeriodicTriggerConfigurationTimeUnit
+    """The unit of time for the interval."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PeriodicTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PeriodicTriggerConfiguration:
+        """Deserializes the PeriodicTriggerConfiguration from a dictionary."""
+        return cls(interval=d.get('interval', None),
+                   unit=_enum(d, 'unit', PeriodicTriggerConfigurationTimeUnit))
+
+
+class PeriodicTriggerConfigurationTimeUnit(Enum):
+
+    DAYS = 'DAYS'
+    HOURS = 'HOURS'
+    TIME_UNIT_UNSPECIFIED = 'TIME_UNIT_UNSPECIFIED'
+    WEEKS = 'WEEKS'
+
+
 @dataclass
 class PipelineParams:
     full_refresh: Optional[bool] = None
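A short sketch of the new dataclass; the interval and unit values are illustrative:

from databricks.sdk.service import jobs

periodic = jobs.PeriodicTriggerConfiguration(
    interval=1, unit=jobs.PeriodicTriggerConfigurationTimeUnit.DAYS)
periodic.as_dict()   # {'interval': 1, 'unit': 'DAYS'}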
@@ -2179,8 +2244,6 @@ class RepairRun:
     pipeline_params: Optional[PipelineParams] = None
 
     python_named_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
-    {"name": "task", "data": "dbfs:/path/to/data.json"}`."""
 
     python_params: Optional[List[str]] = None
     """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
@@ -2856,8 +2919,6 @@ class RunJobTask:
     pipeline_params: Optional[PipelineParams] = None
 
     python_named_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
-    {"name": "task", "data": "dbfs:/path/to/data.json"}`."""
 
     python_params: Optional[List[str]] = None
     """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
@@ -3006,8 +3067,6 @@ class RunNow:
     pipeline_params: Optional[PipelineParams] = None
 
     python_named_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
-    {"name": "task", "data": "dbfs:/path/to/data.json"}`."""
 
     python_params: Optional[List[str]] = None
     """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
@@ -3217,8 +3276,6 @@ class RunParameters:
     pipeline_params: Optional[PipelineParams] = None
 
     python_named_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
-    {"name": "task", "data": "dbfs:/path/to/data.json"}`."""
 
     python_params: Optional[List[str]] = None
     """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
@@ -3398,6 +3455,10 @@ class RunTask:
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
 
+    environment_key: Optional[str] = None
+    """The key that references an environment spec in a job. This field is required for Python script,
+    Python wheel and dbt tasks when using serverless compute."""
+
     execution_duration: Optional[int] = None
     """The time in milliseconds it took to execute the commands in the JAR or notebook until they
     completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration
@@ -3529,6 +3590,7 @@ class RunTask:
         if self.description is not None: body['description'] = self.description
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.end_time is not None: body['end_time'] = self.end_time
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
@@ -3571,6 +3633,7 @@ class RunTask:
                    description=d.get('description', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    end_time=d.get('end_time', None),
+                   environment_key=d.get('environment_key', None),
                    execution_duration=d.get('execution_duration', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', RunForEachTask),
@@ -4128,18 +4191,12 @@ class SubmitRun:
     access_control_list: Optional[List[iam.AccessControlRequest]] = None
     """List of permissions to set on the job."""
 
-    condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
-
-    dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
-
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""
 
+    environments: Optional[List[JobEnvironment]] = None
+    """A list of task execution environment specifications that can be referenced by tasks of this run."""
+
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
     Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -4167,20 +4224,10 @@ class SubmitRun:
 
     [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html"""
 
-    notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
-
     notification_settings: Optional[JobNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this run."""
 
-    pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
-
-    python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
-
     queue: Optional[QueueSettings] = None
     """The queue settings of the one-time run."""
 
@@ -4188,38 +4235,9 @@ class SubmitRun:
     """Specifies the user or service principal that the job runs as. If not specified, the job runs as
     the user who submits the request."""
 
-    run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
-
     run_name: Optional[str] = None
     """An optional name for the run. The default value is `Untitled`."""
 
-    spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
-
-    spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
-
-    spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
-
-    sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
-
     tasks: Optional[List[SubmitTask]] = None
 
     timeout_seconds: Optional[int] = None
@@ -4233,24 +4251,15 @@ class SubmitRun:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
-        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.git_source: body['git_source'] = self.git_source.as_dict()
         if self.health: body['health'] = self.health.as_dict()
         if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
-        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
         if self.queue: body['queue'] = self.queue.as_dict()
         if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
         if self.run_name is not None: body['run_name'] = self.run_name
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
-        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
@@ -4260,24 +4269,15 @@ class SubmitRun:
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest),
-                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
-                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
+                   environments=_repeated_dict(d, 'environments', JobEnvironment),
                    git_source=_from_dict(d, 'git_source', GitSource),
                    health=_from_dict(d, 'health', JobsHealthRules),
                    idempotency_token=d.get('idempotency_token', None),
-                   notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
-                   pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
-                   python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
                    queue=_from_dict(d, 'queue', QueueSettings),
                    run_as=_from_dict(d, 'run_as', JobRunAs),
-                   run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
                    run_name=d.get('run_name', None),
-                   spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
-                   spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
-                   spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask),
-                   sql_task=_from_dict(d, 'sql_task', SqlTask),
                    tasks=_repeated_dict(d, 'tasks', SubmitTask),
                    timeout_seconds=d.get('timeout_seconds', None),
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
@@ -4314,6 +4314,10 @@ class SubmitTask:
     execution of other tasks. Does not require a cluster to execute and does not support retries or
     notifications."""
 
+    dbt_task: Optional[DbtTask] = None
+    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
+    the ability to use a serverless or a pro SQL warehouse."""
+
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
     this field must complete successfully before executing this task. The key is `task_key`, and the
@@ -4326,6 +4330,10 @@ class SubmitTask:
     """An optional set of email addresses notified when the task run begins or completes. The default
     behavior is to not send any emails."""
 
+    environment_key: Optional[str] = None
+    """The key that references an environment spec in a job. This field is required for Python script,
+    Python wheel and dbt tasks when using serverless compute."""
+
     existing_cluster_id: Optional[str] = None
     """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
     jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
@@ -4404,9 +4412,11 @@ class SubmitTask:
         """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
+        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.health: body['health'] = self.health.as_dict()
@@ -4431,9 +4441,11 @@ class SubmitTask:
     def from_dict(cls, d: Dict[str, any]) -> SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
         return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
+                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
+                   environment_key=d.get('environment_key', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
                    health=_from_dict(d, 'health', JobsHealthRules),
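With `dbt_task` and `environment_key` now on `SubmitTask`, a one-time dbt run on serverless compute can be sketched roughly as follows; the warehouse ID and environment key are placeholders, and the environment itself must be declared in the run's `environments` list:

from databricks.sdk.service import jobs

dbt = jobs.SubmitTask(
    task_key='dbt_build',
    environment_key='default',  # must match a JobEnvironment declared on the run
    dbt_task=jobs.DbtTask(commands=['dbt deps', 'dbt build'],
                          warehouse_id='<sql-warehouse-id>'))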
@@ -4736,6 +4748,13 @@ class TaskEmailNotifications:
     """A list of email addresses to be notified when a run begins. If not specified on job creation,
     reset, or update, the list is empty, and notifications are not sent."""
 
+    on_streaming_backlog_exceeded: Optional[List[str]] = None
+    """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any
+    stream. Streaming backlog thresholds can be set in the `health` field using the following
+    metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or
+    `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the
+    issue persists, notifications are resent every 30 minutes."""
+
     on_success: Optional[List[str]] = None
     """A list of email addresses to be notified when a run successfully completes. A run is considered
     to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS`
@@ -4753,6 +4772,8 @@ class TaskEmailNotifications:
             ]
         if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
         if self.on_start: body['on_start'] = [v for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
@@ -4764,6 +4785,7 @@ class TaskEmailNotifications:
                    None),
                    on_failure=d.get('on_failure', None),
                    on_start=d.get('on_start', None),
+                   on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None),
                    on_success=d.get('on_success', None))
 
 
@@ -4827,6 +4849,9 @@ class TriggerSettings:
     pause_status: Optional[PauseStatus] = None
     """Whether this trigger is paused or not."""
 
+    periodic: Optional[PeriodicTriggerConfiguration] = None
+    """Periodic trigger settings."""
+
     table: Optional[TableUpdateTriggerConfiguration] = None
     """Old table trigger settings name. Deprecated in favor of `table_update`."""
 
@@ -4837,6 +4862,7 @@ class TriggerSettings:
         body = {}
         if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.periodic: body['periodic'] = self.periodic.as_dict()
         if self.table: body['table'] = self.table.as_dict()
         if self.table_update: body['table_update'] = self.table_update.as_dict()
         return body
@@ -4846,6 +4872,7 @@ class TriggerSettings:
         """Deserializes the TriggerSettings from a dictionary."""
         return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration),
                    pause_status=_enum(d, 'pause_status', PauseStatus),
+                   periodic=_from_dict(d, 'periodic', PeriodicTriggerConfiguration),
                    table=_from_dict(d, 'table', TableUpdateTriggerConfiguration),
                    table_update=_from_dict(d, 'table_update', TableUpdateTriggerConfiguration))
 
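A sketch of wiring the new periodic trigger into a job's trigger settings; the interval and unit are illustrative:

from databricks.sdk.service import jobs

trigger = jobs.TriggerSettings(
    pause_status=jobs.PauseStatus.UNPAUSED,
    periodic=jobs.PeriodicTriggerConfiguration(
        interval=4, unit=jobs.PeriodicTriggerConfigurationTimeUnit.HOURS))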
@@ -4993,6 +5020,14 @@ class WebhookNotifications:
     """An optional list of system notification IDs to call when the run starts. A maximum of 3
     destinations can be specified for the `on_start` property."""
 
+    on_streaming_backlog_exceeded: Optional[List[Webhook]] = None
+    """An optional list of system notification IDs to call when any streaming backlog thresholds are
+    exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the
+    following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`,
+    `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute
+    average of these metrics. If the issue persists, notifications are resent every 30 minutes. A
+    maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property."""
+
     on_success: Optional[List[Webhook]] = None
     """An optional list of system notification IDs to call when the run completes successfully. A
     maximum of 3 destinations can be specified for the `on_success` property."""
@@ -5006,6 +5041,8 @@ class WebhookNotifications:
             ]
         if self.on_failure: body['on_failure'] = [v.as_dict() for v in self.on_failure]
         if self.on_start: body['on_start'] = [v.as_dict() for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = [v.as_dict() for v in self.on_streaming_backlog_exceeded]
         if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success]
         return body
 
@@ -5016,6 +5053,7 @@ class WebhookNotifications:
                    d, 'on_duration_warning_threshold_exceeded', Webhook),
                    on_failure=_repeated_dict(d, 'on_failure', Webhook),
                    on_start=_repeated_dict(d, 'on_start', Webhook),
+                   on_streaming_backlog_exceeded=_repeated_dict(d, 'on_streaming_backlog_exceeded', Webhook),
                    on_success=_repeated_dict(d, 'on_success', Webhook))
 
 
@@ -5586,8 +5624,6 @@ class JobsAPI:
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
         :param python_named_params: Dict[str,str] (optional)
-          A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
-          {"name": "task", "data": "dbfs:/path/to/data.json"}`.
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
          The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
@@ -5777,8 +5813,6 @@ class JobsAPI:
          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
         :param python_named_params: Dict[str,str] (optional)
-          A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
-          {"name": "task", "data": "dbfs:/path/to/data.json"}`.
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
          The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
@@ -5894,24 +5928,15 @@ class JobsAPI:
     def submit(self,
                *,
                access_control_list: Optional[List[iam.AccessControlRequest]] = None,
-               condition_task: Optional[ConditionTask] = None,
-               dbt_task: Optional[DbtTask] = None,
                email_notifications: Optional[JobEmailNotifications] = None,
+               environments: Optional[List[JobEnvironment]] = None,
                git_source: Optional[GitSource] = None,
                health: Optional[JobsHealthRules] = None,
                idempotency_token: Optional[str] = None,
-               notebook_task: Optional[NotebookTask] = None,
               notification_settings: Optional[JobNotificationSettings] = None,
-               pipeline_task: Optional[PipelineTask] = None,
-               python_wheel_task: Optional[PythonWheelTask] = None,
               queue: Optional[QueueSettings] = None,
               run_as: Optional[JobRunAs] = None,
-               run_job_task: Optional[RunJobTask] = None,
               run_name: Optional[str] = None,
-               spark_jar_task: Optional[SparkJarTask] = None,
-               spark_python_task: Optional[SparkPythonTask] = None,
-               spark_submit_task: Optional[SparkSubmitTask] = None,
-               sql_task: Optional[SqlTask] = None,
               tasks: Optional[List[SubmitTask]] = None,
               timeout_seconds: Optional[int] = None,
               webhook_notifications: Optional[WebhookNotifications] = None) -> Wait[Run]:
@@ -5923,14 +5948,10 @@ class JobsAPI:
 
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
           List of permissions to set on the job.
-        :param condition_task: :class:`ConditionTask` (optional)
-          If condition_task, specifies a condition with an outcome that can be used to control the execution
-          of other tasks. Does not require a cluster to execute and does not support retries or notifications.
-        :param dbt_task: :class:`DbtTask` (optional)
-          If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and the
-          ability to use a serverless or a pro SQL warehouse.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
+        :param environments: List[:class:`JobEnvironment`] (optional)
+          A list of task execution environment specifications that can be referenced by tasks of this run.
         :param git_source: :class:`GitSource` (optional)
           An optional specification for a remote Git repository containing the source code used by tasks.
           Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -5955,47 +5976,16 @@ class JobsAPI:
           For more information, see [How to ensure idempotency for jobs].
 
           [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
-        :param notebook_task: :class:`NotebookTask` (optional)
-          If notebook_task, indicates that this task must run a notebook. This field may not be specified in
-          conjunction with spark_jar_task.
         :param notification_settings: :class:`JobNotificationSettings` (optional)
           Optional notification settings that are used when sending notifications to each of the
           `email_notifications` and `webhook_notifications` for this run.
-        :param pipeline_task: :class:`PipelineTask` (optional)
-          If pipeline_task, indicates that this task must execute a Pipeline.
-        :param python_wheel_task: :class:`PythonWheelTask` (optional)
-          If python_wheel_task, indicates that this job must execute a PythonWheel.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the one-time run.
         :param run_as: :class:`JobRunAs` (optional)
          Specifies the user or service principal that the job runs as. If not specified, the job runs as the
          user who submits the request.
-        :param run_job_task: :class:`RunJobTask` (optional)
-          If run_job_task, indicates that this task must execute another job.
         :param run_name: str (optional)
           An optional name for the run. The default value is `Untitled`.
-        :param spark_jar_task: :class:`SparkJarTask` (optional)
-          If spark_jar_task, indicates that this task must run a JAR.
-        :param spark_python_task: :class:`SparkPythonTask` (optional)
-          If spark_python_task, indicates that this task must run a Python file.
-        :param spark_submit_task: :class:`SparkSubmitTask` (optional)
-          If `spark_submit_task`, indicates that this task must be launched by the spark submit script. This
-          task can run only on new clusters.
-
-          In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-          `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-          configurations.
-
-          `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-          _cannot_ specify them in parameters.
-
-          By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks
-          services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some
-          room for off-heap usage.
-
-          The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
-        :param sql_task: :class:`SqlTask` (optional)
-          If sql_task, indicates that this job must execute a SQL task.
         :param tasks: List[:class:`SubmitTask`] (optional)
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
@@ -6009,24 +5999,15 @@ class JobsAPI:
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        if condition_task is not None: body['condition_task'] = condition_task.as_dict()
-        if dbt_task is not None: body['dbt_task'] = dbt_task.as_dict()
         if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
+        if environments is not None: body['environments'] = [v.as_dict() for v in environments]
         if git_source is not None: body['git_source'] = git_source.as_dict()
         if health is not None: body['health'] = health.as_dict()
         if idempotency_token is not None: body['idempotency_token'] = idempotency_token
-        if notebook_task is not None: body['notebook_task'] = notebook_task.as_dict()
         if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
-        if pipeline_task is not None: body['pipeline_task'] = pipeline_task.as_dict()
-        if python_wheel_task is not None: body['python_wheel_task'] = python_wheel_task.as_dict()
         if queue is not None: body['queue'] = queue.as_dict()
         if run_as is not None: body['run_as'] = run_as.as_dict()
-        if run_job_task is not None: body['run_job_task'] = run_job_task.as_dict()
         if run_name is not None: body['run_name'] = run_name
-        if spark_jar_task is not None: body['spark_jar_task'] = spark_jar_task.as_dict()
-        if spark_python_task is not None: body['spark_python_task'] = spark_python_task.as_dict()
-        if spark_submit_task is not None: body['spark_submit_task'] = spark_submit_task.as_dict()
-        if sql_task is not None: body['sql_task'] = sql_task.as_dict()
         if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks]
         if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
         if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
@@ -6041,47 +6022,29 @@ class JobsAPI:
         self,
         *,
         access_control_list: Optional[List[iam.AccessControlRequest]] = None,
-        condition_task: Optional[ConditionTask] = None,
-        dbt_task: Optional[DbtTask] = None,
         email_notifications: Optional[JobEmailNotifications] = None,
+        environments: Optional[List[JobEnvironment]] = None,
        git_source: Optional[GitSource] = None,
        health: Optional[JobsHealthRules] = None,
        idempotency_token: Optional[str] = None,
-        notebook_task: Optional[NotebookTask] = None,
        notification_settings: Optional[JobNotificationSettings] = None,
-        pipeline_task: Optional[PipelineTask] = None,
-        python_wheel_task: Optional[PythonWheelTask] = None,
        queue: Optional[QueueSettings] = None,
        run_as: Optional[JobRunAs] = None,
-        run_job_task: Optional[RunJobTask] = None,
        run_name: Optional[str] = None,
-        spark_jar_task: Optional[SparkJarTask] = None,
-        spark_python_task: Optional[SparkPythonTask] = None,
-        spark_submit_task: Optional[SparkSubmitTask] = None,
-        sql_task: Optional[SqlTask] = None,
        tasks: Optional[List[SubmitTask]] = None,
        timeout_seconds: Optional[int] = None,
        webhook_notifications: Optional[WebhookNotifications] = None,
        timeout=timedelta(minutes=20)) -> Run:
        return self.submit(access_control_list=access_control_list,
-                           condition_task=condition_task,
-                           dbt_task=dbt_task,
                            email_notifications=email_notifications,
+                           environments=environments,
                            git_source=git_source,
                            health=health,
                            idempotency_token=idempotency_token,
-                           notebook_task=notebook_task,
                            notification_settings=notification_settings,
-                           pipeline_task=pipeline_task,
-                           python_wheel_task=python_wheel_task,
                            queue=queue,
                            run_as=run_as,
-                           run_job_task=run_job_task,
                            run_name=run_name,
-                           spark_jar_task=spark_jar_task,
-                           spark_python_task=spark_python_task,
-                           spark_submit_task=spark_submit_task,
-                           sql_task=sql_task,
                            tasks=tasks,
                            timeout_seconds=timeout_seconds,
                            webhook_notifications=webhook_notifications).result(timeout=timeout)
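Taken together, the `submit()` changes remove the top-level single-task arguments (`notebook_task`, `spark_jar_task`, `sql_task`, and so on), so one-time submissions now go through `tasks`, and add `environments` for serverless runs. A minimal sketch, assuming an already configured workspace client; the paths, keys, and the environment spec are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute, jobs

w = WorkspaceClient()

# Submit a one-time run whose task runs on serverless compute and references
# a hypothetical environment declared under `environments`.
run = w.jobs.submit(
    run_name='one-time-export',
    environments=[jobs.JobEnvironment(environment_key='default',
                                      spec=compute.Environment(client='1'))],
    tasks=[jobs.SubmitTask(task_key='export',
                           environment_key='default',
                           spark_python_task=jobs.SparkPythonTask(
                               python_file='/Workspace/Users/someone@example.com/export.py'))]
).result()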