databricks-sdk 0.23.0__py3-none-any.whl → 0.25.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +79 -0
- databricks/sdk/core.py +1 -1
- databricks/sdk/errors/base.py +45 -1
- databricks/sdk/errors/mapper.py +9 -1
- databricks/sdk/errors/overrides.py +25 -0
- databricks/sdk/service/_internal.py +5 -0
- databricks/sdk/service/catalog.py +367 -244
- databricks/sdk/service/compute.py +78 -23
- databricks/sdk/service/dashboards.py +83 -0
- databricks/sdk/service/files.py +25 -12
- databricks/sdk/service/iam.py +19 -24
- databricks/sdk/service/jobs.py +659 -364
- databricks/sdk/service/marketplace.py +3571 -0
- databricks/sdk/service/serving.py +53 -53
- databricks/sdk/service/workspace.py +50 -10
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.23.0.dist-info → databricks_sdk-0.25.0.dist-info}/METADATA +19 -1
- {databricks_sdk-0.23.0.dist-info → databricks_sdk-0.25.0.dist-info}/RECORD +22 -20
- {databricks_sdk-0.23.0.dist-info → databricks_sdk-0.25.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.23.0.dist-info → databricks_sdk-0.25.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.23.0.dist-info → databricks_sdk-0.25.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.23.0.dist-info → databricks_sdk-0.25.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/jobs.py CHANGED
@@ -80,6 +80,9 @@ class BaseRun:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    description: Optional[str] = None
+    """Description of the run"""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
@@ -122,6 +125,12 @@ class BaseRun:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
+    queue_duration: Optional[int] = None
+    """The time in milliseconds that the run has spent in the queue."""
+
+    repair_history: Optional[List[RepairHistoryItem]] = None
+    """The repair history of the run."""
+
     run_duration: Optional[int] = None
     """The time in milliseconds it took the job run and all of its repairs to finish."""
 
@@ -135,9 +144,9 @@ class BaseRun:
     """The URL to the detail page of the run."""
 
     run_type: Optional[RunType] = None
-    """* `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow
-    run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with
-    :method:jobs/submit.
+    """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
+    `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
+    run. A run created with :method:jobs/submit.
 
     [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow"""
 
@@ -175,6 +184,7 @@ class BaseRun:
     triggered by a table update."""
 
     trigger_info: Optional[TriggerInfo] = None
+    """Additional details about what triggered the run"""
 
     def as_dict(self) -> dict:
         """Serializes the BaseRun into a dictionary suitable for use as a JSON request body."""
@@ -184,6 +194,7 @@ class BaseRun:
         if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.description is not None: body['description'] = self.description
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -194,6 +205,8 @@ class BaseRun:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
         if self.run_id is not None: body['run_id'] = self.run_id
         if self.run_name is not None: body['run_name'] = self.run_name
@@ -216,6 +229,7 @@ class BaseRun:
                    cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
                    cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
                    creator_user_name=d.get('creator_user_name', None),
+                   description=d.get('description', None),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -225,6 +239,8 @@ class BaseRun:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
+                   queue_duration=d.get('queue_duration', None),
+                   repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
                    run_id=d.get('run_id', None),
                    run_name=d.get('run_name', None),
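The net effect on `BaseRun` is three new optional fields (`description`, `queue_duration`, `repair_history`) that round-trip through `as_dict`/`from_dict` like every other field. A minimal sketch with illustrative values (not a real API response):

```python
from databricks.sdk.service.jobs import BaseRun

# Hypothetical payload exercising the fields added in 0.25.0.
run = BaseRun.from_dict({
    'run_id': 42,
    'description': 'nightly backfill',  # new in 0.25.0
    'queue_duration': 1500,             # new in 0.25.0, ms spent queued
})
assert run.queue_duration == 1500
assert run.as_dict()['description'] == 'nightly backfill'
```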
@@ -341,21 +357,26 @@ class ClusterInstance:
 @dataclass
 class ClusterSpec:
     existing_cluster_id: Optional[str] = None
-    """If existing_cluster_id, the ID of an existing cluster that is used for all runs of this job.
-    When running jobs on an existing cluster, you may need to manually restart the cluster if it
-    stops responding. We suggest running jobs on new clusters for greater reliability."""
+    """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
+    jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
+    responding. We suggest running jobs and tasks on new clusters for greater reliability"""
+
+    job_cluster_key: Optional[str] = None
+    """If job_cluster_key, this task is executed reusing the cluster specified in
+    `job.settings.job_clusters`."""
 
     libraries: Optional[List[compute.Library]] = None
-    """An optional list of libraries to be installed on the cluster that executes the job. The
-    default value is an empty list."""
+    """An optional list of libraries to be installed on the cluster. The default value is an empty
+    list."""
 
     new_cluster: Optional[compute.ClusterSpec] = None
-    """If new_cluster, a description of a cluster that is created for each run."""
+    """If new_cluster, a description of a new cluster that is created for each run."""
 
     def as_dict(self) -> dict:
         """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
         return body
@@ -364,6 +385,7 @@ class ClusterSpec:
     def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
         return cls(existing_cluster_id=d.get('existing_cluster_id', None),
+                   job_cluster_key=d.get('job_cluster_key', None),
                    libraries=_repeated_dict(d, 'libraries', compute.Library),
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
 
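With the new `job_cluster_key` field, a task's cluster spec can point at a shared cluster declared under `job.settings.job_clusters` instead of embedding its own definition. A small sketch (the key name is hypothetical):

```python
from databricks.sdk.service.jobs import ClusterSpec

spec = ClusterSpec(job_cluster_key='shared_etl_cluster')
# Only the key is serialized; the cluster itself lives in job.settings.job_clusters.
assert spec.as_dict() == {'job_cluster_key': 'shared_etl_cluster'}
```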
@@ -376,11 +398,7 @@ class Condition(Enum):
 
 @dataclass
 class ConditionTask:
-    left: Optional[str] = None
-    """The left operand of the condition task. Can be either a string value or a job state or parameter
-    reference."""
-
-    op: Optional[ConditionTaskOp] = None
+    op: ConditionTaskOp
     """* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that
     `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`,
     `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands.
@@ -391,7 +409,11 @@ class ConditionTask:
     If a task value was set to a boolean value, it will be serialized to `“true”` or
     `“false”` for the comparison."""
 
-    right: Optional[str] = None
+    left: str
+    """The left operand of the condition task. Can be either a string value or a job state or parameter
+    reference."""
+
+    right: str
     """The right operand of the condition task. Can be either a string value or a job state or
     parameter reference."""
 
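Because `op`, `left`, and `right` no longer default to `None`, a `ConditionTask` must now be built with all three operands. Sketch with illustrative values:

```python
from databricks.sdk.service.jobs import ConditionTask, ConditionTaskOp

task = ConditionTask(op=ConditionTaskOp.GREATER_THAN,
                     left='{{job.parameters.row_count}}',  # hypothetical parameter reference
                     right='0')
assert task.as_dict()['op'] == 'GREATER_THAN'
```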
@@ -450,9 +472,6 @@ class CreateJob:
     access_control_list: Optional[List[iam.AccessControlRequest]] = None
     """List of permissions to set on the job."""
 
-    compute: Optional[List[JobCompute]] = None
-    """A list of compute requirements that can be referenced by tasks of this job."""
-
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -463,7 +482,7 @@ class CreateJob:
     description: Optional[str] = None
     """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding."""
 
-    edit_mode: Optional[CreateJobEditMode] = None
+    edit_mode: Optional[JobEditMode] = None
     """Edit mode of the job.
 
     * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
@@ -473,6 +492,9 @@ class CreateJob:
     """An optional set of email addresses that is notified when runs of this job begin or complete as
     well as when this job is deleted."""
 
+    environments: Optional[List[JobEnvironment]] = None
+    """A list of task execution environment specifications that can be referenced by tasks of this job."""
+
     format: Optional[Format] = None
     """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
     When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`."""
@@ -496,18 +518,14 @@ class CreateJob:
     task settings."""
 
     max_concurrent_runs: Optional[int] = None
-    """An optional maximum allowed number of concurrent runs of the job.
-
-    Set this value if you want to be able to execute multiple runs of the same job concurrently.
-    This is useful for example if you trigger your job on a frequent schedule and want to allow
-    consecutive runs to overlap with each other, or if you want to trigger multiple runs which
-    differ by their input parameters.
-
-    This setting affects only new runs. For example, suppose the job’s concurrency is 4 and there
-    are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active
-    runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs.
-
-    This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped."""
+    """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
+    be able to execute multiple runs of the same job concurrently. This is useful for example if you
+    trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
+    other, or if you want to trigger multiple runs which differ by their input parameters. This
+    setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4
+    concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.
+    However, from then on, new runs are skipped unless there are fewer than 3 active runs. This
+    value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped."""
 
     name: Optional[str] = None
     """An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding."""
@@ -558,12 +576,12 @@ class CreateJob:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.compute: body['compute'] = [v.as_dict() for v in self.compute]
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
         if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.format is not None: body['format'] = self.format.value
         if self.git_source: body['git_source'] = self.git_source.as_dict()
         if self.health: body['health'] = self.health.as_dict()
@@ -586,12 +604,12 @@ class CreateJob:
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest),
-                   compute=_repeated_dict(d, 'compute', JobCompute),
                    continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
-                   edit_mode=_enum(d, 'edit_mode', CreateJobEditMode),
+                   edit_mode=_enum(d, 'edit_mode', JobEditMode),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
+                   environments=_repeated_dict(d, 'environments', JobEnvironment),
                    format=_enum(d, 'format', Format),
                    git_source=_from_dict(d, 'git_source', GitSource),
                    health=_from_dict(d, 'health', JobsHealthRules),
@@ -610,18 +628,10 @@ class CreateJob:
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
 
 
-class CreateJobEditMode(Enum):
-    """Edit mode of the job.
-
-    * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
-    in an editable state and can be modified."""
-
-    EDITABLE = 'EDITABLE'
-    UI_LOCKED = 'UI_LOCKED'
-
-
 @dataclass
 class CreateResponse:
+    """Job was created successfully"""
+
     job_id: Optional[int] = None
     """The canonical identifier for the newly created job."""
 
@@ -641,7 +651,7 @@ class CreateResponse:
 class CronSchedule:
     quartz_cron_expression: str
     """A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger]
-    for details. This field is required.
+    for details. This field is required.
 
     [Cron Trigger]: http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html"""
 
@@ -719,12 +729,12 @@ class DbtTask:
 
     source: Optional[Source] = None
     """Optional location type of the project directory. When set to `WORKSPACE`, the project will be
-    retrieved from the local <Databricks> workspace. When set to `GIT`, the project will be
-    retrieved from a Git repository defined in `git_source`. If the value is empty, the task will
-    use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.
+    retrieved from the local Databricks workspace. When set to `GIT`, the project will be retrieved
+    from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if
+    `git_source` is defined and `WORKSPACE` otherwise.
 
-    * `WORKSPACE`: Project is located in <Databricks> workspace. * `GIT`: Project is located in
-    cloud Git provider."""
+    * `WORKSPACE`: Project is located in Databricks workspace. * `GIT`: Project is located in cloud
+    Git provider."""
 
     warehouse_id: Optional[str] = None
     """ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the
@@ -789,7 +799,7 @@ class DeleteResponse:
 @dataclass
 class DeleteRun:
     run_id: int
-    """The canonical identifier of the run for which to retrieve the metadata."""
+    """ID of the run to delete."""
 
     def as_dict(self) -> dict:
         """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body."""
@@ -819,6 +829,8 @@ class DeleteRunResponse:
 
 @dataclass
 class ExportRunOutput:
+    """Run was exported successfully."""
+
     views: Optional[List[ViewItem]] = None
     """The exported content in HTML format (one for every view item). To extract the HTML notebook from
     the JSON response, download and run this [Python script].
@@ -839,14 +851,14 @@ class ExportRunOutput:
 
 @dataclass
 class FileArrivalTriggerConfiguration:
+    url: str
+    """URL to be monitored for file arrivals. The path must point to the root or a subpath of the
+    external location."""
+
     min_time_between_triggers_seconds: Optional[int] = None
     """If set, the trigger starts a run only after the specified amount of time passed since the last
     time the trigger fired. The minimum allowed value is 60 seconds"""
 
-    url: Optional[str] = None
-    """The storage location to monitor for file arrivals. The value must point to the root or a subpath
-    of an external location URL or the root or subpath of a Unity Catalog volume."""
-
     wait_after_last_change_seconds: Optional[int] = None
     """If set, the trigger starts a run only after no file activity has occurred for the specified
     amount of time. This makes it possible to wait for a batch of incoming files to arrive before
@@ -872,7 +884,7 @@ class FileArrivalTriggerConfiguration:
 
 @dataclass
 class ForEachStats:
-    error_message_stats: Optional[ForEachTaskErrorMessageStats] = None
+    error_message_stats: Optional[List[ForEachTaskErrorMessageStats]] = None
     """Sample of 3 most common error messages occurred during the iteration."""
 
     task_run_stats: Optional[ForEachTaskTaskRunStats] = None
@@ -881,14 +893,15 @@ class ForEachStats:
     def as_dict(self) -> dict:
         """Serializes the ForEachStats into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error_message_stats: body['error_message_stats'] = self.error_message_stats.as_dict()
+        if self.error_message_stats:
+            body['error_message_stats'] = [v.as_dict() for v in self.error_message_stats]
         if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict()
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachStats:
         """Deserializes the ForEachStats from a dictionary."""
-        return cls(error_message_stats=_from_dict(d, 'error_message_stats', ForEachTaskErrorMessageStats),
+        return cls(error_message_stats=_repeated_dict(d, 'error_message_stats', ForEachTaskErrorMessageStats),
                    task_run_stats=_from_dict(d, 'task_run_stats', ForEachTaskTaskRunStats))
 
 
@@ -898,6 +911,7 @@ class ForEachTask:
     """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
 
     task: Task
+    """Configuration for the task that will be run for each element in the array"""
 
     concurrency: Optional[int] = None
     """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
@@ -920,7 +934,7 @@ class ForEachTask:
 
 @dataclass
 class ForEachTaskErrorMessageStats:
-    count: Optional[str] = None
+    count: Optional[int] = None
     """Describes the count of such error message encountered during the iterations."""
 
     error_message: Optional[str] = None
@@ -1100,6 +1114,8 @@ class GitSource:
 
 @dataclass
 class Job:
+    """Job was retrieved successfully."""
+
     created_time: Optional[int] = None
     """The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC)."""
 
@@ -1221,7 +1237,7 @@ class JobCluster:
     `JobTaskSettings` may refer to this field to determine which cluster to launch for the task
     execution."""
 
-    new_cluster: Optional[compute.ClusterSpec] = None
+    new_cluster: compute.ClusterSpec
     """If new_cluster, a description of a cluster that is created for each task."""
 
     def as_dict(self) -> dict:
@@ -1238,28 +1254,6 @@ class JobCluster:
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
 
 
-@dataclass
-class JobCompute:
-    compute_key: str
-    """A unique name for the compute requirement. This field is required and must be unique within the
-    job. `JobTaskSettings` may refer to this field to determine the compute requirements for the
-    task execution."""
-
-    spec: compute.ComputeSpec
-
-    def as_dict(self) -> dict:
-        """Serializes the JobCompute into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.compute_key is not None: body['compute_key'] = self.compute_key
-        if self.spec: body['spec'] = self.spec.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> JobCompute:
-        """Deserializes the JobCompute from a dictionary."""
-        return cls(compute_key=d.get('compute_key', None), spec=_from_dict(d, 'spec', compute.ComputeSpec))
-
-
 @dataclass
 class JobDeployment:
     kind: JobDeploymentKind
@@ -1285,13 +1279,21 @@ class JobDeployment:
 
 
 class JobDeploymentKind(Enum):
-    """The kind of deployment that manages the job.
-
-    * `BUNDLE`: The job is managed by Databricks Asset Bundle."""
+    """* `BUNDLE`: The job is managed by Databricks Asset Bundle."""
 
     BUNDLE = 'BUNDLE'
 
 
+class JobEditMode(Enum):
+    """Edit mode of the job.
+
+    * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
+    in an editable state and can be modified."""
+
+    EDITABLE = 'EDITABLE'
+    UI_LOCKED = 'UI_LOCKED'
+
+
 @dataclass
 class JobEmailNotifications:
     no_alert_for_skipped_runs: Optional[bool] = None
@@ -1344,6 +1346,30 @@ class JobEmailNotifications:
                    on_success=d.get('on_success', None))
 
 
+@dataclass
+class JobEnvironment:
+    environment_key: str
+    """The key of an environment. It has to be unique within a job."""
+
+    spec: Optional[compute.Environment] = None
+    """The a environment entity used to preserve serverless environment side panel and jobs'
+    environment for non-notebook task. In this minimal environment spec, only pip dependencies are
+    supported. Next ID: 5"""
+
+    def as_dict(self) -> dict:
+        """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.spec: body['spec'] = self.spec.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> JobEnvironment:
+        """Deserializes the JobEnvironment from a dictionary."""
+        return cls(environment_key=d.get('environment_key', None),
+                   spec=_from_dict(d, 'spec', compute.Environment))
+
+
 @dataclass
 class JobNotificationSettings:
     no_alert_for_canceled_runs: Optional[bool] = None
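`JobEnvironment` ties a reusable serverless environment spec to a key that tasks reference. A sketch, assuming `compute.Environment` takes a `client` version and a pip-style `dependencies` list (values illustrative):

```python
from databricks.sdk.service import compute
from databricks.sdk.service.jobs import JobEnvironment

env = JobEnvironment(
    environment_key='default',  # must be unique within the job
    spec=compute.Environment(client='1', dependencies=['pandas==2.2.0']),
)
assert env.as_dict()['environment_key'] == 'default'
```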
@@ -1552,9 +1578,6 @@ class JobRunAs:
 
 @dataclass
 class JobSettings:
-    compute: Optional[List[JobCompute]] = None
-    """A list of compute requirements that can be referenced by tasks of this job."""
-
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -1565,7 +1588,7 @@ class JobSettings:
     description: Optional[str] = None
     """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding."""
 
-    edit_mode: Optional[JobSettingsEditMode] = None
+    edit_mode: Optional[JobEditMode] = None
     """Edit mode of the job.
 
     * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
@@ -1575,6 +1598,9 @@ class JobSettings:
     """An optional set of email addresses that is notified when runs of this job begin or complete as
     well as when this job is deleted."""
 
+    environments: Optional[List[JobEnvironment]] = None
+    """A list of task execution environment specifications that can be referenced by tasks of this job."""
+
     format: Optional[Format] = None
     """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
     When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`."""
@@ -1598,18 +1624,14 @@ class JobSettings:
     task settings."""
 
     max_concurrent_runs: Optional[int] = None
-    """An optional maximum allowed number of concurrent runs of the job.
-
-    Set this value if you want to be able to execute multiple runs of the same job concurrently.
-    This is useful for example if you trigger your job on a frequent schedule and want to allow
-    consecutive runs to overlap with each other, or if you want to trigger multiple runs which
-    differ by their input parameters.
-
-    This setting affects only new runs. For example, suppose the job’s concurrency is 4 and there
-    are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active
-    runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs.
-
-    This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped."""
+    """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
+    be able to execute multiple runs of the same job concurrently. This is useful for example if you
+    trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
+    other, or if you want to trigger multiple runs which differ by their input parameters. This
+    setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4
+    concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.
+    However, from then on, new runs are skipped unless there are fewer than 3 active runs. This
+    value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped."""
 
     name: Optional[str] = None
     """An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding."""
@@ -1658,12 +1680,12 @@ class JobSettings:
     def as_dict(self) -> dict:
         """Serializes the JobSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.compute: body['compute'] = [v.as_dict() for v in self.compute]
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
         if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.format is not None: body['format'] = self.format.value
         if self.git_source: body['git_source'] = self.git_source.as_dict()
         if self.health: body['health'] = self.health.as_dict()
@@ -1685,12 +1707,12 @@ class JobSettings:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
-        return cls(compute=_repeated_dict(d, 'compute', JobCompute),
-                   continuous=_from_dict(d, 'continuous', Continuous),
+        return cls(continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
-                   edit_mode=_enum(d, 'edit_mode', JobSettingsEditMode),
+                   edit_mode=_enum(d, 'edit_mode', JobEditMode),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
+                   environments=_repeated_dict(d, 'environments', JobEnvironment),
                    format=_enum(d, 'format', Format),
                    git_source=_from_dict(d, 'git_source', GitSource),
                    health=_from_dict(d, 'health', JobsHealthRules),
@@ -1709,16 +1731,6 @@ class JobSettings:
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
 
 
-class JobSettingsEditMode(Enum):
-    """Edit mode of the job.
-
-    * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
-    in an editable state and can be modified."""
-
-    EDITABLE = 'EDITABLE'
-    UI_LOCKED = 'UI_LOCKED'
-
-
 @dataclass
 class JobSource:
     """The source of the job specification in the remote repository when the job is source controlled."""
@@ -1784,13 +1796,13 @@ class JobsHealthOperator(Enum):
 
 @dataclass
 class JobsHealthRule:
-    metric: Optional[JobsHealthMetric] = None
+    metric: JobsHealthMetric
     """Specifies the health metric that is being evaluated for a particular health rule."""
 
-    op: Optional[JobsHealthOperator] = None
+    op: JobsHealthOperator
     """Specifies the operator used to compare the health metric value with the specified threshold."""
 
-    value: Optional[int] = None
+    value: int
     """Specifies the threshold value that the health metric should obey to satisfy the health rule."""
 
     def as_dict(self) -> dict:
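All three fields of a health rule are now required, so a rule is declared in a single constructor call. Sketch (the threshold is illustrative):

```python
from databricks.sdk.service.jobs import (JobsHealthMetric, JobsHealthOperator,
                                         JobsHealthRule, JobsHealthRules)

rules = JobsHealthRules(rules=[
    JobsHealthRule(metric=JobsHealthMetric.RUN_DURATION_SECONDS,
                   op=JobsHealthOperator.GREATER_THAN,
                   value=3600)  # flag runs longer than one hour
])
```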
@@ -1829,6 +1841,8 @@ class JobsHealthRules:
 
 @dataclass
 class ListJobsResponse:
+    """List of jobs was retrieved successfully."""
+
     has_more: Optional[bool] = None
     """If true, additional jobs matching the provided filter are available for listing."""
 
@@ -1861,6 +1875,8 @@ class ListJobsResponse:
 
 @dataclass
 class ListRunsResponse:
+    """List of runs was retrieved successfully."""
+
     has_more: Optional[bool] = None
     """If true, additional runs matching the provided filter are available for listing."""
 
@@ -1892,18 +1908,6 @@ class ListRunsResponse:
                    runs=_repeated_dict(d, 'runs', BaseRun))
 
 
-class ListRunsRunType(Enum):
-    """* `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow
-    run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with
-    :method:jobs/submit.
-
-    [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow"""
-
-    JOB_RUN = 'JOB_RUN'
-    SUBMIT_RUN = 'SUBMIT_RUN'
-    WORKFLOW_RUN = 'WORKFLOW_RUN'
-
-
 @dataclass
 class NotebookOutput:
     result: Optional[str] = None
@@ -1938,10 +1942,9 @@ class NotebookTask:
 
     base_parameters: Optional[Dict[str, str]] = None
     """Base parameters to be used for each run of this job. If the run is initiated by a call to
-    :method:jobs/runNow with parameters specified, the two parameters maps are merged. If the same
-    key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used.
-
-    Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+    :method:jobs/run Now with parameters specified, the two parameters maps are merged. If the same
+    key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used. Use
+    [Task parameter variables] to set parameters containing information about job runs.
 
     If the notebook takes a parameter that is not specified in the job’s `base_parameters` or the
     `run-now` override parameters, the default value from the notebook is used.
@@ -1950,17 +1953,15 @@ class NotebookTask:
 
     The JSON representation of this field cannot exceed 1MB.
 
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
-    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets"""
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets"""
 
     source: Optional[Source] = None
     """Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved
-    from the local <Databricks> workspace. When set to `GIT`, the notebook will be retrieved from a
+    from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a
     Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if
-    `git_source` is defined and `WORKSPACE` otherwise.
-
-    * `WORKSPACE`: Notebook is located in <Databricks> workspace. * `GIT`: Notebook is located in
-    cloud Git provider."""
+    `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Notebook is located in
+    Databricks workspace. * `GIT`: Notebook is located in cloud Git provider."""
 
     def as_dict(self) -> dict:
         """Serializes the NotebookTask into a dictionary suitable for use as a JSON request body."""
@@ -1978,9 +1979,6 @@ class NotebookTask:
                    source=_enum(d, 'source', Source))
 
 
-ParamPairs = Dict[str, str]
-
-
 class PauseStatus(Enum):
 
     PAUSED = 'PAUSED'
@@ -2006,12 +2004,12 @@ class PipelineParams:
 
 @dataclass
 class PipelineTask:
-    full_refresh: Optional[bool] = None
-    """If true, a full refresh will be triggered on the delta live table."""
-
-    pipeline_id: Optional[str] = None
+    pipeline_id: str
     """The full name of the pipeline task to execute."""
 
+    full_refresh: Optional[bool] = None
+    """If true, triggers a full refresh on the delta live table."""
+
     def as_dict(self) -> dict:
         """Serializes the PipelineTask into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -2027,7 +2025,10 @@ class PipelineTask:
 
 @dataclass
 class PythonWheelTask:
-    entry_point: Optional[str] = None
+    package_name: str
+    """Name of the package to execute"""
+
+    entry_point: str
     """Named entry point to use, if it does not exist in the metadata of the package it executes the
     function from the package directly using `$packageName.$entryPoint()`"""
 
@@ -2035,9 +2036,6 @@ class PythonWheelTask:
     """Command-line parameters passed to Python wheel task in the form of `["--name=task",
     "--data=dbfs:/path/to/data.json"]`. Leave it empty if `parameters` is not null."""
 
-    package_name: Optional[str] = None
-    """Name of the package to execute"""
-
     parameters: Optional[List[str]] = None
     """Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not
     null."""
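`package_name` and `entry_point` are now required and lead the field list, so a wheel task is constructed like this (names and parameters are hypothetical):

```python
from databricks.sdk.service.jobs import PythonWheelTask

wheel = PythonWheelTask(package_name='my_wheel', entry_point='main',
                        parameters=['--date=2024-01-01'])
```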
@@ -2133,7 +2131,7 @@ class RepairRun:
 
     dbt_commands: Optional[List[str]] = None
     """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-    deps", "dbt seed", "dbt run"]`"""
+    deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
 
     jar_params: Optional[List[str]] = None
     """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
@@ -2142,9 +2140,8 @@ class RepairRun:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
-
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
+    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
+    information about job runs."""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2162,13 +2159,13 @@ class RepairRun:
 
     notebook_params cannot be specified in conjunction with jar_params.
 
-    Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+    Use [Task parameter variables] to set parameters containing information about job runs.
 
     The JSON representation of this field (for example `{"notebook_params":{"name":"john
     doe","age":"35"}}`) cannot exceed 10,000 bytes.
 
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
-    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets"""
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
 
@@ -2182,7 +2179,7 @@ class RepairRun:
     `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
     of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+    Use [Task parameter variables] to set parameters containing information about job runs.
 
     Important
 
@@ -2190,7 +2187,7 @@ class RepairRun:
     returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
     emojis.
 
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     rerun_all_failed_tasks: Optional[bool] = None
     """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be
@@ -2210,7 +2207,7 @@ class RepairRun:
     parameters specified in job setting. The JSON representation of this field (for example
     `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-    Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+    Use [Task parameter variables] to set parameters containing information about job runs
 
     Important
 
@@ -2218,7 +2215,7 @@ class RepairRun:
     returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
     emojis.
 
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     sql_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -2265,6 +2262,8 @@ class RepairRun:
 
 @dataclass
 class RepairRunResponse:
+    """Run repair was initiated."""
+
     repair_id: Optional[int] = None
     """The ID of the repair. Must be provided in subsequent repairs using the `latest_repair_id` field
     to ensure sequential repairs."""
@@ -2407,21 +2406,21 @@ class ResolvedPythonWheelTaskValues:
 
 @dataclass
 class ResolvedRunJobTaskValues:
-    named_parameters: Optional[Dict[str, str]] = None
+    job_parameters: Optional[Dict[str, str]] = None
 
     parameters: Optional[Dict[str, str]] = None
 
     def as_dict(self) -> dict:
         """Serializes the ResolvedRunJobTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.parameters: body['parameters'] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedRunJobTaskValues:
         """Deserializes the ResolvedRunJobTaskValues from a dictionary."""
-        return cls(named_parameters=d.get('named_parameters', None), parameters=d.get('parameters', None))
+        return cls(job_parameters=d.get('job_parameters', None), parameters=d.get('parameters', None))
 
 
 @dataclass
@@ -2518,6 +2517,9 @@ class Run:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    description: Optional[str] = None
+    """Description of the run"""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
@@ -2560,6 +2562,9 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
+    queue_duration: Optional[int] = None
+    """The time in milliseconds that the run has spent in the queue."""
+
     repair_history: Optional[List[RepairHistoryItem]] = None
     """The repair history of the run."""
 
@@ -2576,9 +2581,9 @@ class Run:
     """The URL to the detail page of the run."""
 
     run_type: Optional[RunType] = None
-    """* `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow
-    run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with
-    :method:jobs/submit.
+    """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
+    `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
+    run. A run created with :method:jobs/submit.
 
     [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow"""
 
@@ -2616,6 +2621,7 @@ class Run:
     triggered by a table update."""
 
     trigger_info: Optional[TriggerInfo] = None
+    """Additional details about what triggered the run"""
 
     def as_dict(self) -> dict:
         """Serializes the Run into a dictionary suitable for use as a JSON request body."""
@@ -2625,6 +2631,7 @@ class Run:
         if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.description is not None: body['description'] = self.description
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -2635,6 +2642,7 @@ class Run:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
         if self.run_id is not None: body['run_id'] = self.run_id
@@ -2658,6 +2666,7 @@ class Run:
                    cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
                    cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
                    creator_user_name=d.get('creator_user_name', None),
+                   description=d.get('description', None),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -2667,6 +2676,7 @@ class Run:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
+                   queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
                    run_id=d.get('run_id', None),
@@ -2684,14 +2694,24 @@ class Run:
 
 @dataclass
 class RunConditionTask:
+    op: ConditionTaskOp
+    """* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that
+    `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`,
+    `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands.
+    `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to
+    `false`.
+
+    The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`.
+    If a task value was set to a boolean value, it will be serialized to `“true”` or
+    `“false”` for the comparison."""
+
     left: str
-    """The left operand of the condition task."""
+    """The left operand of the condition task. Can be either a string value or a job state or parameter
+    reference."""
 
     right: str
-    """The right operand of the condition task."""
-
-    op: RunConditionTaskOp
-    """The condtion task operator."""
+    """The right operand of the condition task. Can be either a string value or a job state or
+    parameter reference."""
 
     outcome: Optional[str] = None
     """The condition expression evaluation result. Filled in if the task was successfully completed.
@@ -2710,33 +2730,25 @@ class RunConditionTask:
     def from_dict(cls, d: Dict[str, any]) -> RunConditionTask:
         """Deserializes the RunConditionTask from a dictionary."""
         return cls(left=d.get('left', None),
-                   op=_enum(d, 'op', RunConditionTaskOp),
+                   op=_enum(d, 'op', ConditionTaskOp),
                    outcome=d.get('outcome', None),
                    right=d.get('right', None))
 
 
-class RunConditionTaskOp(Enum):
-    """The condtion task operator."""
-
-    EQUAL_TO = 'EQUAL_TO'
-    GREATER_THAN = 'GREATER_THAN'
-    GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
-    LESS_THAN = 'LESS_THAN'
-    LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
-    NOT_EQUAL = 'NOT_EQUAL'
-
-
 @dataclass
 class RunForEachTask:
+    inputs: str
+    """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
+
+    task: Task
+    """Configuration for the task that will be run for each element in the array"""
+
     concurrency: Optional[int] = None
     """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
 
-    inputs: Optional[str] = None
-    """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
-
     stats: Optional[ForEachStats] = None
-
-
+    """Read only field. Populated for GetRun and ListRuns RPC calls and stores the execution stats of
+    an For each task"""
 
     def as_dict(self) -> dict:
         """Serializes the RunForEachTask into a dictionary suitable for use as a JSON request body."""
@@ -2796,20 +2808,111 @@ class RunJobTask:
|
|
|
2796
2808
|
job_id: int
|
|
2797
2809
|
"""ID of the job to trigger."""
|
|
2798
2810
|
|
|
2811
|
+
dbt_commands: Optional[List[str]] = None
|
|
2812
|
+
"""An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
|
|
2813
|
+
deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
|
|
2814
|
+    jar_params: Optional[List[str]] = None
+    """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
+    "35"]`. The parameters are used to invoke the main function of the main class specified in the
+    Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot
+    be specified in conjunction with notebook_params. The JSON representation of this field (for
+    example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+
+    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
+    information about job runs."""
+
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used to trigger the job."""

+    notebook_params: Optional[Dict[str, str]] = None
+    """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
+    "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
+    [dbutils.widgets.get] function.
+
+    If not specified upon `run-now`, the triggered run uses the job’s base parameters.
+
+    notebook_params cannot be specified in conjunction with jar_params.
+
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    The JSON representation of this field (for example `{"notebook_params":{"name":"john
+    doe","age":"35"}}`) cannot exceed 10,000 bytes.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
+
+    pipeline_params: Optional[PipelineParams] = None
+
+    python_named_params: Optional[Dict[str, str]] = None
+    """A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
+    {"name": "task", "data": "dbfs:/path/to/data.json"}`."""
+
+    python_params: Optional[List[str]] = None
+    """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
+    "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon
+    `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
+    of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+
+    Use [Task parameter variables] to set parameters containing information about job runs.
+
+    Important
+
+    These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+    returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+    emojis.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+
+    spark_submit_params: Optional[List[str]] = None
+    """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
+    ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit
+    script as command-line parameters. If specified upon `run-now`, it would overwrite the
+    parameters specified in job setting. The JSON representation of this field (for example
+    `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+
+    Use [Task parameter variables] to set parameters containing information about job runs
+
+    Important
+
+    These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+    returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+    emojis.
+
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+
+    sql_params: Optional[Dict[str, str]] = None
+    """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
+    doe", "age": "35"}`. The SQL alert task does not support custom parameters."""
+
     def as_dict(self) -> dict:
         """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
+        if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = [v for v in self.python_params]
+        if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params]
+        if self.sql_params: body['sql_params'] = self.sql_params
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobTask:
         """Deserializes the RunJobTask from a dictionary."""
-        return cls(
+        return cls(dbt_commands=d.get('dbt_commands', None),
+                   jar_params=d.get('jar_params', None),
+                   job_id=d.get('job_id', None),
+                   job_parameters=d.get('job_parameters', None),
+                   notebook_params=d.get('notebook_params', None),
+                   pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
+                   python_named_params=d.get('python_named_params', None),
+                   python_params=d.get('python_params', None),
+                   spark_submit_params=d.get('spark_submit_params', None),
+                   sql_params=d.get('sql_params', None))


 class RunLifeCycleState(Enum):
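The new `RunJobTask` override fields mirror the `run_now` parameter families, and the regenerated `as_dict`/`from_dict` helpers above round-trip them. A minimal sketch of that round trip (the job ID is a placeholder):

```python
# Round-tripping the new RunJobTask override fields through the generated
# serialization helpers shown in the hunk above.
from databricks.sdk.service.jobs import RunJobTask

task = RunJobTask(
    job_id=123,  # placeholder
    # Per the docstrings above, notebook_params and jar_params are mutually exclusive.
    notebook_params={"name": "john doe", "age": "35"},
)
body = task.as_dict()                 # -> {'job_id': 123, 'notebook_params': {...}}
restored = RunJobTask.from_dict(body)
assert restored.notebook_params == task.notebook_params
```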
@@ -2843,7 +2946,7 @@ class RunNow:

     dbt_commands: Optional[List[str]] = None
     """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-    deps", "dbt seed", "dbt run"]`"""
+    deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""

     idempotency_token: Optional[str] = None
     """An optional token to guarantee the idempotency of job run requests. If a run with the provided
@@ -2866,9 +2969,8 @@ class RunNow:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [
-
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
+    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
+    information about job runs."""

     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2882,13 +2984,13 @@ class RunNow:

     notebook_params cannot be specified in conjunction with jar_params.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

     The JSON representation of this field (for example `{"notebook_params":{"name":"john
     doe","age":"35"}}`) cannot exceed 10,000 bytes.

-    [
-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""

     pipeline_params: Optional[PipelineParams] = None

@@ -2902,7 +3004,7 @@ class RunNow:
     `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
     of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

     Important

@@ -2910,7 +3012,7 @@ class RunNow:
     returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
     emojis.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     queue: Optional[QueueSettings] = None
     """The queue settings of the run."""
@@ -2922,7 +3024,7 @@ class RunNow:
     parameters specified in job setting. The JSON representation of this field (for example
     `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs

     Important

@@ -2930,7 +3032,7 @@ class RunNow:
     returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
     emojis.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     sql_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -2972,6 +3074,8 @@ class RunNow:

 @dataclass
 class RunNowResponse:
+    """Run was started successfully."""
+
     number_in_job: Optional[int] = None
     """A unique identifier for this job run. This is set to the same value as `run_id`."""

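These `RunNow` fields surface through `WorkspaceClient.jobs.run_now`. A hedged sketch of triggering a run with per-run notebook parameters; the job ID is a placeholder:

```python
# Sketch: triggering a job with per-run notebook parameters.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
waiter = w.jobs.run_now(
    job_id=123,  # placeholder
    notebook_params={"name": "john doe", "age": "35"},  # cannot be combined with jar_params
)
run = waiter.result()  # blocks until the triggered run reaches a terminal state
print(run.state)
```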
@@ -2993,6 +3097,8 @@ class RunNowResponse:

 @dataclass
 class RunOutput:
+    """Run output was retrieved successfully."""
+
     dbt_output: Optional[DbtOutput] = None
     """The output of a dbt task, if available."""

@@ -3003,6 +3109,8 @@ class RunOutput:
     error_trace: Optional[str] = None
     """If there was an error executing the run, this field contains any available stack traces."""

+    info: Optional[str] = None
+
     logs: Optional[str] = None
     """The output from tasks that write to standard streams (stdout/stderr) such as spark_jar_task,
     spark_python_task, python_wheel_task.
@@ -3020,10 +3128,11 @@ class RunOutput:
     notebook_output: Optional[NotebookOutput] = None
     """The output of a notebook task, if available. A notebook task that terminates (either
     successfully or with a failure) without calling `dbutils.notebook.exit()` is considered to have
-    an empty output. This field is set but its result value is empty.
-
-
-
+    an empty output. This field is set but its result value is empty. Databricks restricts this API
+    to return the first 5 MB of the output. To return a larger result, use the [ClusterLogConf]
+    field to configure log storage for the job cluster.
+
+    [ClusterLogConf]: https://docs.databricks.com/dev-tools/api/latest/clusters.html#clusterlogconf"""

     run_job_output: Optional[RunJobOutput] = None
     """The output of a run job task, if available"""
@@ -3037,6 +3146,7 @@ class RunOutput:
         if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict()
         if self.error is not None: body['error'] = self.error
         if self.error_trace is not None: body['error_trace'] = self.error_trace
+        if self.info is not None: body['info'] = self.info
         if self.logs is not None: body['logs'] = self.logs
         if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated
         if self.metadata: body['metadata'] = self.metadata.as_dict()
@@ -3051,6 +3161,7 @@ class RunOutput:
         return cls(dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
                    error=d.get('error', None),
                    error_trace=d.get('error_trace', None),
+                   info=d.get('info', None),
                    logs=d.get('logs', None),
                    logs_truncated=d.get('logs_truncated', None),
                    metadata=_from_dict(d, 'metadata', Run),
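A sketch of how the extended `RunOutput` looks from the client side, including the new `info` field and the 5 MB truncation noted in the docstring above; the run ID is a placeholder:

```python
# Sketch: fetching run output for a single task run.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
output = w.jobs.get_run_output(run_id=456)  # placeholder run ID
if output.notebook_output:
    # per the docstring above, the API returns at most the first 5 MB of output
    print(output.notebook_output.result)
if output.info:  # `info` is a new optional field in this release
    print(output.info)
```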
@@ -3063,7 +3174,7 @@ class RunOutput:
 class RunParameters:
     dbt_commands: Optional[List[str]] = None
     """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-    deps", "dbt seed", "dbt run"]`"""
+    deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""

     jar_params: Optional[List[str]] = None
     """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
@@ -3072,12 +3183,8 @@ class RunParameters:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [
-
-    [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
-
-    job_parameters: Optional[Dict[str, str]] = None
-    """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
+    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
+    information about job runs."""

     notebook_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -3088,13 +3195,13 @@ class RunParameters:

     notebook_params cannot be specified in conjunction with jar_params.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

     The JSON representation of this field (for example `{"notebook_params":{"name":"john
     doe","age":"35"}}`) cannot exceed 10,000 bytes.

-    [
-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""

     pipeline_params: Optional[PipelineParams] = None

@@ -3108,7 +3215,7 @@ class RunParameters:
     `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
     of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

     Important

@@ -3116,7 +3223,7 @@ class RunParameters:
     returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
     emojis.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     spark_submit_params: Optional[List[str]] = None
     """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
@@ -3125,7 +3232,7 @@ class RunParameters:
     parameters specified in job setting. The JSON representation of this field (for example
     `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs

     Important

@@ -3133,7 +3240,7 @@ class RunParameters:
     returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
     emojis.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     sql_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -3144,7 +3251,6 @@ class RunParameters:
         body = {}
         if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
         if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.notebook_params: body['notebook_params'] = self.notebook_params
         if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
         if self.python_named_params: body['python_named_params'] = self.python_named_params
@@ -3158,7 +3264,6 @@ class RunParameters:
         """Deserializes the RunParameters from a dictionary."""
         return cls(dbt_commands=d.get('dbt_commands', None),
                    jar_params=d.get('jar_params', None),
-                   job_parameters=d.get('job_parameters', None),
                    notebook_params=d.get('notebook_params', None),
                    pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
                    python_named_params=d.get('python_named_params', None),
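Note that `job_parameters` is removed from `RunParameters` in this release; job-level parameters are supplied on the run-now request instead. A minimal sketch of the resulting shape:

```python
# Sketch: RunParameters no longer carries job_parameters after this change;
# job-level parameters are supplied via run_now's job_parameters argument instead.
from databricks.sdk.service.jobs import RunParameters

params = RunParameters(python_params=["john doe", "35"])
assert "job_parameters" not in params.as_dict()
```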
@@ -3233,6 +3338,13 @@ class RunState:

 @dataclass
 class RunTask:
+    """Used when outputting a child run, in GetRun or ListRuns."""
+
+    task_key: str
+    """A unique name for the task. This field is used to refer to this task from other tasks. This
+    field is required and must be unique within its parent job. On Update or Reset, this field is
+    used to reference the tasks to be updated or reset."""
+
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
     has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
@@ -3267,6 +3379,10 @@ class RunTask:
     description: Optional[str] = None
     """An optional description for this task."""

+    email_notifications: Optional[JobEmailNotifications] = None
+    """An optional set of email addresses notified when the task run begins or completes. The default
+    behavior is to not send any emails."""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
@@ -3279,9 +3395,9 @@ class RunTask:
     duration of a multitask job run is the value of the `run_duration` field."""

     existing_cluster_id: Optional[str] = None
-    """If existing_cluster_id, the ID of an existing cluster that is used for all runs
-
-
+    """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
+    jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
+    responding. We suggest running jobs and tasks on new clusters for greater reliability"""

     for_each_task: Optional[RunForEachTask] = None
     """If for_each_task, indicates that this task must execute the nested task within it."""
@@ -3289,26 +3405,32 @@ class RunTask:
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
     Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-
     If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-    However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-
-
-
+    However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. Note:
+    dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
+    used, `git_source` must be defined on the job."""
+
+    job_cluster_key: Optional[str] = None
+    """If job_cluster_key, this task is executed reusing the cluster specified in
+    `job.settings.job_clusters`."""

     libraries: Optional[List[compute.Library]] = None
-    """An optional list of libraries to be installed on the cluster
-
+    """An optional list of libraries to be installed on the cluster. The default value is an empty
+    list."""

     new_cluster: Optional[compute.ClusterSpec] = None
-    """If new_cluster, a description of a new cluster that is created
+    """If new_cluster, a description of a new cluster that is created for each run."""

     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this
+    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
     in conjunction with spark_jar_task."""

+    notification_settings: Optional[TaskNotificationSettings] = None
+    """Optional notification settings that are used when sending notifications to each of the
+    `email_notifications` and `webhook_notifications` for this task run."""
+
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this
+    """If pipeline_task, indicates that this task must execute a Pipeline."""

     python_wheel_task: Optional[PythonWheelTask] = None
     """If python_wheel_task, indicates that this job must execute a PythonWheel."""
@@ -3319,6 +3441,9 @@ class RunTask:
     resolved_values: Optional[ResolvedValues] = None
     """Parameter values including resolved references"""

+    run_duration: Optional[int] = None
+    """The time in milliseconds it took the job run and all of its repairs to finish."""
+
     run_id: Optional[int] = None
     """The ID of the task run."""

@@ -3330,6 +3455,8 @@ class RunTask:
     run_job_task: Optional[RunJobTask] = None
     """If run_job_task, indicates that this task must execute another job."""

+    run_page_url: Optional[str] = None
+
     setup_duration: Optional[int] = None
     """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this
     is the cluster creation time, for runs that run on existing clusters this time should be very
@@ -3338,10 +3465,10 @@ class RunTask:
     duration of a multitask job run is the value of the `run_duration` field."""

     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this
+    """If spark_jar_task, indicates that this task must run a JAR."""

     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this
+    """If spark_python_task, indicates that this task must run a Python file."""

     spark_submit_task: Optional[SparkSubmitTask] = None
     """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
@@ -3361,7 +3488,7 @@ class RunTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""

     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL."""
+    """If sql_task, indicates that this job must execute a SQL task."""

     start_time: Optional[int] = None
     """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC).
@@ -3371,10 +3498,13 @@ class RunTask:
     state: Optional[RunState] = None
     """The current state of the run."""

-
-    """
-
-
+    timeout_seconds: Optional[int] = None
+    """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
+
+    webhook_notifications: Optional[WebhookNotifications] = None
+    """A collection of system notification IDs to notify when the run begins or completes. The default
+    behavior is to not send any system notifications. Task webhooks respect the task notification
+    settings."""

     def as_dict(self) -> dict:
         """Serializes the RunTask into a dictionary suitable for use as a JSON request body."""
@@ -3386,21 +3516,26 @@ class RunTask:
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
+        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.git_source: body['git_source'] = self.git_source.as_dict()
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
         if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
+        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
         if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
         if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.resolved_values: body['resolved_values'] = self.resolved_values.as_dict()
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
         if self.run_id is not None: body['run_id'] = self.run_id
         if self.run_if is not None: body['run_if'] = self.run_if.value
         if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
         if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
         if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
         if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
@@ -3409,6 +3544,8 @@ class RunTask:
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
         if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body

     @classmethod
@@ -3421,21 +3558,26 @@ class RunTask:
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
+                   email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', RunForEachTask),
                    git_source=_from_dict(d, 'git_source', GitSource),
+                   job_cluster_key=d.get('job_cluster_key', None),
                    libraries=_repeated_dict(d, 'libraries', compute.Library),
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec),
                    notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
+                   notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings),
                    pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
                    python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
                    queue_duration=d.get('queue_duration', None),
                    resolved_values=_from_dict(d, 'resolved_values', ResolvedValues),
+                   run_duration=d.get('run_duration', None),
                    run_id=d.get('run_id', None),
                    run_if=_enum(d, 'run_if', RunIf),
                    run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
+                   run_page_url=d.get('run_page_url', None),
                    setup_duration=d.get('setup_duration', None),
                    spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
                    spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
@@ -3443,13 +3585,15 @@ class RunTask:
                    sql_task=_from_dict(d, 'sql_task', SqlTask),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
-                   task_key=d.get('task_key', None)
+                   task_key=d.get('task_key', None),
+                   timeout_seconds=d.get('timeout_seconds', None),
+                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))


 class RunType(Enum):
-    """* `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
-    run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
-    :method:jobs/submit.
+    """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
+    `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
+    run. A run created with :method:jobs/submit.

     [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
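The enriched `RunTask` shape is what clients see when inspecting a run's child tasks. A hedged sketch (the run ID is a placeholder):

```python
# Sketch: the new RunTask fields are visible when inspecting a finished run.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
run = w.jobs.get_run(run_id=456)  # placeholder run ID
for task in run.tasks or []:
    # run_duration and run_page_url are new on RunTask in this release
    print(task.task_key, task.run_duration, task.run_page_url)
```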
@@ -3459,6 +3603,13 @@ class RunType(Enum):


 class Source(Enum):
+    """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\
+    from the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a
+    Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if
+    `git_source` is defined and `WORKSPACE` otherwise.
+
+    * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in
+    cloud Git provider."""

     GIT = 'GIT'
     WORKSPACE = 'WORKSPACE'
@@ -3480,9 +3631,9 @@ class SparkJarTask:
     parameters: Optional[List[str]] = None
     """Parameters passed to the main method.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     def as_dict(self) -> dict:
         """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
@@ -3511,17 +3662,17 @@ class SparkPythonTask:
     parameters: Optional[List[str]] = None
     """Command line parameters passed to the Python file.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     source: Optional[Source] = None
     """Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file
-    will be retrieved from the local
+    will be retrieved from the local Databricks workspace or cloud location (if the `python_file`
     has a URI format). When set to `GIT`, the Python file will be retrieved from a Git repository
     defined in `git_source`.

-    * `WORKSPACE`: The Python file is located in a
+    * `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem
     URI. * `GIT`: The Python file is located in a remote Git repository."""

     def as_dict(self) -> dict:
@@ -3545,9 +3696,9 @@ class SparkSubmitTask:
     parameters: Optional[List[str]] = None
     """Command-line parameters passed to spark submit.

-    Use [
+    Use [Task parameter variables] to set parameters containing information about job runs.

-    [
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

     def as_dict(self) -> dict:
         """Serializes the SparkSubmitTask into a dictionary suitable for use as a JSON request body."""
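A hedged sketch of the pattern these docstrings describe: a workspace-sourced Python task whose parameters carry a task parameter variable. The `{{job.run_id}}` reference is an assumption based on the linked "Task parameter variables" documentation, not something this diff defines; the path is a placeholder.

```python
# Sketch: a workspace-sourced Python task using a task parameter variable.
from databricks.sdk.service.jobs import Source, SparkPythonTask

py_task = SparkPythonTask(
    python_file="/Workspace/Users/someone@example.com/main.py",  # placeholder path
    parameters=["--run-id", "{{job.run_id}}"],  # assumed variable, resolved by Jobs at run time
    source=Source.WORKSPACE,
)
```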
@@ -3683,7 +3834,6 @@ class SqlDashboardWidgetOutput:


 class SqlDashboardWidgetOutputStatus(Enum):
-    """The execution status of the SQL widget."""

     CANCELLED = 'CANCELLED'
     FAILED = 'FAILED'
@@ -3738,6 +3888,8 @@ class SqlOutputError:

 @dataclass
 class SqlQueryOutput:
+    endpoint_id: Optional[str] = None
+
     output_link: Optional[str] = None
     """The link to find the output results."""

@@ -3753,6 +3905,7 @@ class SqlQueryOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlQueryOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
         if self.output_link is not None: body['output_link'] = self.output_link
         if self.query_text is not None: body['query_text'] = self.query_text
         if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements]
@@ -3762,7 +3915,8 @@ class SqlQueryOutput:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlQueryOutput:
         """Deserializes the SqlQueryOutput from a dictionary."""
-        return cls(
+        return cls(endpoint_id=d.get('endpoint_id', None),
+                   output_link=d.get('output_link', None),
                    query_text=d.get('query_text', None),
                    sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput),
                    warehouse_id=d.get('warehouse_id', None))
@@ -3899,11 +4053,11 @@ class SqlTaskFile:

     source: Optional[Source] = None
     """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved
-    from the local
+    from the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a
     Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if
     `git_source` is defined and `WORKSPACE` otherwise.

-    * `WORKSPACE`: SQL file is located in
+    * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in
     cloud Git provider."""

     def as_dict(self) -> dict:
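A minimal sketch of a file-based SQL task using the clarified `source` semantics; per the docstring above, an empty `source` falls back to `GIT` when `git_source` is defined on the job. The warehouse ID and path are placeholders.

```python
# Sketch: a file-based SQL task with an explicit Git source.
from databricks.sdk.service.jobs import Source, SqlTask, SqlTaskFile

sql_task = SqlTask(
    warehouse_id="abc123",  # placeholder serverless/pro SQL warehouse ID
    file=SqlTaskFile(path="queries/daily_report.sql", source=Source.GIT),
)
```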
@@ -3965,6 +4119,15 @@ class SubmitRun:
     access_control_list: Optional[List[iam.AccessControlRequest]] = None
     """List of permissions to set on the job."""

+    condition_task: Optional[ConditionTask] = None
+    """If condition_task, specifies a condition with an outcome that can be used to control the
+    execution of other tasks. Does not require a cluster to execute and does not support retries or
+    notifications."""
+
+    dbt_task: Optional[DbtTask] = None
+    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
+    the ability to use a serverless or a pro SQL warehouse."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""

@@ -3995,16 +4158,55 @@ class SubmitRun:

     [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html"""

+    notebook_task: Optional[NotebookTask] = None
+    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
+    in conjunction with spark_jar_task."""
+
     notification_settings: Optional[JobNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this run."""

+    pipeline_task: Optional[PipelineTask] = None
+    """If pipeline_task, indicates that this task must execute a Pipeline."""
+
+    python_wheel_task: Optional[PythonWheelTask] = None
+    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+
     queue: Optional[QueueSettings] = None
     """The queue settings of the one-time run."""

+    run_job_task: Optional[RunJobTask] = None
+    """If run_job_task, indicates that this task must execute another job."""
+
     run_name: Optional[str] = None
     """An optional name for the run. The default value is `Untitled`."""

+    spark_jar_task: Optional[SparkJarTask] = None
+    """If spark_jar_task, indicates that this task must run a JAR."""
+
+    spark_python_task: Optional[SparkPythonTask] = None
+    """If spark_python_task, indicates that this task must run a Python file."""
+
+    spark_submit_task: Optional[SparkSubmitTask] = None
+    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
+    This task can run only on new clusters.
+
+    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
+    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
+    configurations.
+
+    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
+    _cannot_ specify them in parameters.
+
+    By default, the Spark submit job uses all available memory (excluding reserved memory for
+    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
+    to leave some room for off-heap usage.
+
+    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+
+    sql_task: Optional[SqlTask] = None
+    """If sql_task, indicates that this job must execute a SQL task."""
+
     tasks: Optional[List[SubmitTask]] = None

     timeout_seconds: Optional[int] = None
@@ -4018,13 +4220,23 @@ class SubmitRun:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
+        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.git_source: body['git_source'] = self.git_source.as_dict()
         if self.health: body['health'] = self.health.as_dict()
         if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
+        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
         if self.queue: body['queue'] = self.queue.as_dict()
+        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
         if self.run_name is not None: body['run_name'] = self.run_name
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
+        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
@@ -4034,13 +4246,23 @@ class SubmitRun:
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest),
+                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
+                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    git_source=_from_dict(d, 'git_source', GitSource),
                    health=_from_dict(d, 'health', JobsHealthRules),
                    idempotency_token=d.get('idempotency_token', None),
+                   notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
+                   pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
+                   python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
                    queue=_from_dict(d, 'queue', QueueSettings),
+                   run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
                    run_name=d.get('run_name', None),
+                   spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
+                   spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
+                   spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask),
+                   sql_task=_from_dict(d, 'sql_task', SqlTask),
                    tasks=_repeated_dict(d, 'tasks', SubmitTask),
                    timeout_seconds=d.get('timeout_seconds', None),
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
@@ -4048,6 +4270,8 @@ class SubmitRun:

 @dataclass
 class SubmitRunResponse:
+    """Run was created and started successfully."""
+
     run_id: Optional[int] = None
     """The canonical identifier for the newly submitted run."""

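A hedged sketch of a one-time run via `jobs.submit`; the `tasks` list remains the usual entry point even though this release re-adds legacy single-task fields on `SubmitRun`. All names, paths, and IDs are placeholders.

```python
# Sketch: submitting a one-off notebook run.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import NotebookTask, SubmitTask

w = WorkspaceClient()
run = w.jobs.submit(
    run_name="one-off notebook run",
    tasks=[
        SubmitTask(
            task_key="main",
            notebook_task=NotebookTask(notebook_path="/Workspace/Users/someone@example.com/nb"),
            existing_cluster_id="0123-456789-abcdefgh",  # placeholder cluster ID
        )
    ],
).result()  # blocks until the submitted run finishes
```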
@@ -4080,29 +4304,30 @@ class SubmitTask:
     this field must complete successfully before executing this task. The key is `task_key`, and the
     value is the name assigned to the dependent task."""

+    description: Optional[str] = None
+    """An optional description for this task."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the task run begins or completes. The default
     behavior is to not send any emails."""

     existing_cluster_id: Optional[str] = None
-    """If existing_cluster_id, the ID of an existing cluster that is used for all runs
-
-
-    for greater reliability."""
+    """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
+    jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
+    responding. We suggest running jobs and tasks on new clusters for greater reliability"""

     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this must execute the nested task within it
-    provided."""
+    """If for_each_task, indicates that this task must execute the nested task within it."""

     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""

     libraries: Optional[List[compute.Library]] = None
-    """An optional list of libraries to be installed on the cluster
-
+    """An optional list of libraries to be installed on the cluster. The default value is an empty
+    list."""

     new_cluster: Optional[compute.ClusterSpec] = None
-    """If new_cluster, a description of a cluster that is created for each run."""
+    """If new_cluster, a description of a new cluster that is created for each run."""

     notebook_task: Optional[NotebookTask] = None
     """If notebook_task, indicates that this task must run a notebook. This field may not be specified
@@ -4124,7 +4349,7 @@ class SubmitTask:
     :method:jobs/create for a list of possible values."""

     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this
+    """If run_job_task, indicates that this task must execute another job."""

     spark_jar_task: Optional[SparkJarTask] = None
     """If spark_jar_task, indicates that this task must run a JAR."""
@@ -4150,7 +4375,7 @@ class SubmitTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""

     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL."""
+    """If sql_task, indicates that this job must execute a SQL task."""

     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -4165,6 +4390,7 @@ class SubmitTask:
         body = {}
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
+        if self.description is not None: body['description'] = self.description
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
@@ -4191,6 +4417,7 @@ class SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
         return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
+                   description=d.get('description', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
@@ -4213,7 +4440,7 @@ class SubmitTask:


 @dataclass
-class TableTriggerConfiguration:
+class TableUpdateTriggerConfiguration:
     condition: Optional[Condition] = None
     """The table(s) condition based on which to trigger a job run."""

@@ -4231,7 +4458,7 @@ class TableTriggerConfiguration:
     allowed value is 60 seconds."""

     def as_dict(self) -> dict:
-        """Serializes the TableTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
+        """Serializes the TableUpdateTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.condition is not None: body['condition'] = self.condition.value
         if self.min_time_between_triggers_seconds is not None:
@@ -4242,8 +4469,8 @@ class TableTriggerConfiguration:
         return body

     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> TableTriggerConfiguration:
-        """Deserializes the TableTriggerConfiguration from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> TableUpdateTriggerConfiguration:
+        """Deserializes the TableUpdateTriggerConfiguration from a dictionary."""
         return cls(condition=_enum(d, 'condition', Condition),
                    min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None),
                    table_names=d.get('table_names', None),
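A minimal sketch of the renamed trigger configuration (formerly `TableTriggerConfiguration`); the table name and thresholds are placeholders.

```python
# Sketch: constructing the renamed table-update trigger configuration.
from databricks.sdk.service.jobs import Condition, TableUpdateTriggerConfiguration

cfg = TableUpdateTriggerConfiguration(
    table_names=["main.default.events"],        # placeholder table
    condition=Condition.ALL_UPDATED,
    min_time_between_triggers_seconds=600,
)
```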
@@ -4257,10 +4484,6 @@ class Task:
     field is required and must be unique within its parent job. On Update or Reset, this field is
     used to reference the tasks to be updated or reset."""

-    compute_key: Optional[str] = None
-    """The key of the compute requirement, specified in `job.settings.compute`, to use for execution of
-    this task."""
-
     condition_task: Optional[ConditionTask] = None
     """If condition_task, specifies a condition with an outcome that can be used to control the
     execution of other tasks. Does not require a cluster to execute and does not support retries or
@@ -4279,19 +4502,24 @@ class Task:
     description: Optional[str] = None
     """An optional description for this task."""

+    disable_auto_optimization: Optional[bool] = None
+    """An option to disable auto optimization in serverless"""
+
     email_notifications: Optional[TaskEmailNotifications] = None
     """An optional set of email addresses that is notified when runs of this task begin or complete as
     well as when this task is deleted. The default behavior is to not send any emails."""

+    environment_key: Optional[str] = None
+    """The key that references an environment spec in a job. This field is required for Python script,
+    Python wheel and dbt tasks when using serverless compute."""
+
     existing_cluster_id: Optional[str] = None
-    """If existing_cluster_id, the ID of an existing cluster that is used for all runs
-
-
-    for greater reliability."""
+    """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
+    jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
+    responding. We suggest running jobs and tasks on new clusters for greater reliability"""

     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this must execute the nested task within it
-    provided."""
+    """If for_each_task, indicates that this task must execute the nested task within it."""

     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4301,8 +4529,8 @@ class Task:
     `job.settings.job_clusters`."""

     libraries: Optional[List[compute.Library]] = None
-    """An optional list of libraries to be installed on the cluster
-
+    """An optional list of libraries to be installed on the cluster. The default value is an empty
+    list."""

     max_retries: Optional[int] = None
     """An optional maximum number of times to retry an unsuccessful run. A run is considered to be
@@ -4315,7 +4543,7 @@ class Task:
     subsequent retry run. The default behavior is that unsuccessful runs are immediately retried."""

     new_cluster: Optional[compute.ClusterSpec] = None
-    """If new_cluster, a description of a cluster that is created for
+    """If new_cluster, a description of a new cluster that is created for each run."""

     notebook_task: Optional[NotebookTask] = None
     """If notebook_task, indicates that this task must run a notebook. This field may not be specified
@@ -4332,7 +4560,8 @@ class Task:
     """If python_wheel_task, indicates that this job must execute a PythonWheel."""

     retry_on_timeout: Optional[bool] = None
-    """An optional policy to specify whether to retry a
+    """An optional policy to specify whether to retry a job when it times out. The default behavior is
+    to not retry on timeout."""

     run_if: Optional[RunIf] = None
     """An optional value specifying the condition determining whether the task is run once its
@@ -4382,12 +4611,14 @@ class Task:
     def as_dict(self) -> dict:
         """Serializes the Task into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.compute_key is not None: body['compute_key'] = self.compute_key
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
+        if self.disable_auto_optimization is not None:
+            body['disable_auto_optimization'] = self.disable_auto_optimization
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.health: body['health'] = self.health.as_dict()
@@ -4416,12 +4647,13 @@ class Task:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Task:
         """Deserializes the Task from a dictionary."""
-        return cls(
-            condition_task=_from_dict(d, 'condition_task', ConditionTask),
+        return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
+                   disable_auto_optimization=d.get('disable_auto_optimization', None),
                    email_notifications=_from_dict(d, 'email_notifications', TaskEmailNotifications),
+                   environment_key=d.get('environment_key', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
                    health=_from_dict(d, 'health', JobsHealthRules),
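A sketch of the new serverless-oriented `Task` fields; it assumes `environment_key` references an environment spec defined elsewhere on the job, and all names and paths are placeholders.

```python
# Sketch: a serverless-leaning task using the fields added in this release.
from databricks.sdk.service.jobs import SparkPythonTask, Task

task = Task(
    task_key="etl",
    spark_python_task=SparkPythonTask(python_file="/Workspace/Repos/etl/main.py"),
    environment_key="default",        # must match an environment spec on the job (assumption)
    disable_auto_optimization=True,   # new opt-out for serverless auto optimization
)
```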
@@ -4470,6 +4702,9 @@ class TaskDependency:
 
 @dataclass
 class TaskEmailNotifications:
+    no_alert_for_skipped_runs: Optional[bool] = None
+    """If true, do not send email to recipients specified in `on_failure` if the run is skipped."""
+
     on_duration_warning_threshold_exceeded: Optional[List[str]] = None
     """A list of email addresses to be notified when the duration of a run exceeds the threshold
     specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the
@@ -4495,6 +4730,8 @@ class TaskEmailNotifications:
     def as_dict(self) -> dict:
         """Serializes the TaskEmailNotifications into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
         if self.on_duration_warning_threshold_exceeded:
             body['on_duration_warning_threshold_exceeded'] = [
                 v for v in self.on_duration_warning_threshold_exceeded
@@ -4507,7 +4744,8 @@ class TaskEmailNotifications:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskEmailNotifications:
         """Deserializes the TaskEmailNotifications from a dictionary."""
-        return cls(on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded',
+        return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None),
+                   on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded',
                                                                 None),
                    on_failure=d.get('on_failure', None),
                    on_start=d.get('on_start', None),
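`no_alert_for_skipped_runs`, previously only available on the job-level `JobEmailNotifications`, is now mirrored per task. A minimal sketch (the address is illustrative):

    from databricks.sdk.service import jobs

    # Email on failure, but stay quiet when the task's run is skipped.
    emails = jobs.TaskEmailNotifications(on_failure=['oncall@example.com'],
                                         no_alert_for_skipped_runs=True)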
@@ -4549,6 +4787,8 @@ class TaskNotificationSettings:
 
 @dataclass
 class TriggerInfo:
+    """Additional details about what triggered the run"""
+
     run_id: Optional[int] = None
     """The run id of the Run Job task run"""
 
@@ -4572,8 +4812,10 @@ class TriggerSettings:
     pause_status: Optional[PauseStatus] = None
     """Whether this trigger is paused or not."""
 
-    table: Optional[
-    """
+    table: Optional[TableUpdateTriggerConfiguration] = None
+    """Old table trigger settings name. Deprecated in favor of `table_update`."""
+
+    table_update: Optional[TableUpdateTriggerConfiguration] = None
 
     def as_dict(self) -> dict:
         """Serializes the TriggerSettings into a dictionary suitable for use as a JSON request body."""
@@ -4581,6 +4823,7 @@ class TriggerSettings:
         if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
         if self.table: body['table'] = self.table.as_dict()
+        if self.table_update: body['table_update'] = self.table_update.as_dict()
         return body
 
     @classmethod
@@ -4588,7 +4831,8 @@ class TriggerSettings:
         """Deserializes the TriggerSettings from a dictionary."""
         return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration),
                    pause_status=_enum(d, 'pause_status', PauseStatus),
-                   table=_from_dict(d, 'table',
+                   table=_from_dict(d, 'table', TableUpdateTriggerConfiguration),
+                   table_update=_from_dict(d, 'table_update', TableUpdateTriggerConfiguration))
 
 
 class TriggerType(Enum):
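The table trigger settings move to `table_update`; `table` survives only as a deprecated alias of the same `TableUpdateTriggerConfiguration` type, and the two keys serialize independently. A minimal sketch, assuming an illustrative table name:

    from databricks.sdk.service import jobs

    trigger = jobs.TriggerSettings(
        table_update=jobs.TableUpdateTriggerConfiguration(table_names=['main.billing.events']),
        pause_status=jobs.PauseStatus.UNPAUSED)
    assert 'table_update' in trigger.as_dict()  # new key; 'table' only appears if set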
@@ -4705,7 +4949,7 @@ class ViewsToExport(Enum):
 
 @dataclass
 class Webhook:
-    id: Optional[str] = None
+    id: str
 
     def as_dict(self) -> dict:
         """Serializes the Webhook into a dictionary suitable for use as a JSON request body."""
@@ -4721,8 +4965,7 @@ class Webhook:
 
 @dataclass
 class WebhookNotifications:
-    on_duration_warning_threshold_exceeded: Optional[
-        List[WebhookNotificationsOnDurationWarningThresholdExceededItem]] = None
+    on_duration_warning_threshold_exceeded: Optional[List[Webhook]] = None
     """An optional list of system notification IDs to call when the duration of a run exceeds the
     threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3
     destinations can be specified for the `on_duration_warning_threshold_exceeded` property."""
@@ -4755,29 +4998,12 @@ class WebhookNotifications:
     def from_dict(cls, d: Dict[str, any]) -> WebhookNotifications:
         """Deserializes the WebhookNotifications from a dictionary."""
         return cls(on_duration_warning_threshold_exceeded=_repeated_dict(
-            d, 'on_duration_warning_threshold_exceeded',
-            WebhookNotificationsOnDurationWarningThresholdExceededItem),
+            d, 'on_duration_warning_threshold_exceeded', Webhook),
                    on_failure=_repeated_dict(d, 'on_failure', Webhook),
                    on_start=_repeated_dict(d, 'on_start', Webhook),
                    on_success=_repeated_dict(d, 'on_success', Webhook))
 
 
-@dataclass
-class WebhookNotificationsOnDurationWarningThresholdExceededItem:
-    id: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the WebhookNotificationsOnDurationWarningThresholdExceededItem into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.id is not None: body['id'] = self.id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> WebhookNotificationsOnDurationWarningThresholdExceededItem:
-        """Deserializes the WebhookNotificationsOnDurationWarningThresholdExceededItem from a dictionary."""
-        return cls(id=d.get('id', None))
-
-
 class JobsAPI:
     """The Jobs API allows you to create, edit, and delete jobs.
 
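With the one-off `WebhookNotificationsOnDurationWarningThresholdExceededItem` wrapper deleted, all four notification lists share `Webhook`, whose `id` is now a required field; code that built the old wrapper must switch to `Webhook`. A minimal sketch (the destination ID is illustrative):

    from databricks.sdk.service import jobs

    hooks = jobs.WebhookNotifications(
        on_failure=[jobs.Webhook(id='5b0fa0de-...')],
        on_duration_warning_threshold_exceeded=[jobs.Webhook(id='5b0fa0de-...')])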
@@ -4879,12 +5105,12 @@ class JobsAPI:
    def create(self,
               *,
               access_control_list: Optional[List[iam.AccessControlRequest]] = None,
-              compute: Optional[List[JobCompute]] = None,
               continuous: Optional[Continuous] = None,
               deployment: Optional[JobDeployment] = None,
               description: Optional[str] = None,
-              edit_mode: Optional[
+              edit_mode: Optional[JobEditMode] = None,
               email_notifications: Optional[JobEmailNotifications] = None,
+              environments: Optional[List[JobEnvironment]] = None,
               format: Optional[Format] = None,
               git_source: Optional[GitSource] = None,
               health: Optional[JobsHealthRules] = None,
@@ -4907,8 +5133,6 @@ class JobsAPI:
 
        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
          List of permissions to set on the job.
-       :param compute: List[:class:`JobCompute`] (optional)
-         A list of compute requirements that can be referenced by tasks of this job.
        :param continuous: :class:`Continuous` (optional)
          An optional continuous property for this job. The continuous property will ensure that there is
          always one run executing. Only one of `schedule` and `continuous` can be used.
@@ -4916,7 +5140,7 @@ class JobsAPI:
          Deployment information for jobs managed by external sources.
        :param description: str (optional)
          An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.
-       :param edit_mode: :class:`
+       :param edit_mode: :class:`JobEditMode` (optional)
          Edit mode of the job.
 
          * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in
@@ -4924,6 +5148,8 @@ class JobsAPI:
        :param email_notifications: :class:`JobEmailNotifications` (optional)
          An optional set of email addresses that is notified when runs of this job begin or complete as well
          as when this job is deleted.
+       :param environments: List[:class:`JobEnvironment`] (optional)
+         A list of task execution environment specifications that can be referenced by tasks of this job.
        :param format: :class:`Format` (optional)
          Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
          using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
@@ -4942,18 +5168,14 @@ class JobsAPI:
          A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
          cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
        :param max_concurrent_runs: int (optional)
-         An optional maximum allowed number of concurrent runs of the job.
-
-         Set this value if you want to be able to execute multiple runs of the same job concurrently. This is
-         useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs
-         to overlap with each other, or if you want to trigger multiple runs which differ by their input
-         parameters.
-
-         This setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are
-         4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.
-         However, from then on, new runs are skipped unless there are fewer than 3 active runs.
-
-         This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.
+         An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
+         able to execute multiple runs of the same job concurrently. This is useful for example if you
+         trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
+         other, or if you want to trigger multiple runs which differ by their input parameters. This setting
+         affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent
+         active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from
+         then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed
+         1000. Setting this value to `0` causes all new runs to be skipped.
        :param name: str (optional)
          An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.
        :param notification_settings: :class:`JobNotificationSettings` (optional)
@@ -4993,12 +5215,12 @@ class JobsAPI:
        body = {}
        if access_control_list is not None:
            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-       if compute is not None: body['compute'] = [v.as_dict() for v in compute]
        if continuous is not None: body['continuous'] = continuous.as_dict()
        if deployment is not None: body['deployment'] = deployment.as_dict()
        if description is not None: body['description'] = description
        if edit_mode is not None: body['edit_mode'] = edit_mode.value
        if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
+       if environments is not None: body['environments'] = [v.as_dict() for v in environments]
        if format is not None: body['format'] = format.value
        if git_source is not None: body['git_source'] = git_source.as_dict()
        if health is not None: body['health'] = health.as_dict()
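`create` drops the `compute` list in favor of serverless `environments`, which tasks reference through `environment_key`. A hedged sketch, assuming the `Environment` spec from `databricks.sdk.service.compute` and illustrative names, paths, and dependencies:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute, jobs

    w = WorkspaceClient()
    w.jobs.create(
        name='nightly-etl',
        environments=[jobs.JobEnvironment(
            environment_key='default',
            spec=compute.Environment(client='1', dependencies=['pandas==2.2.0']))],
        tasks=[jobs.Task(task_key='etl',
                         environment_key='default',
                         spark_python_task=jobs.SparkPythonTask(python_file='/Workspace/etl.py'))])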
@@ -5042,7 +5264,7 @@ class JobsAPI:
        Deletes a non-active run. Returns an error if the run is active.
 
        :param run_id: int
-
+         ID of the run to delete.
 
 
        """
@@ -5164,7 +5386,7 @@ class JobsAPI:
        reference them beyond 60 days, you must save old run results before they expire.
 
        :param run_id: int
-         The canonical identifier for the run.
+         The canonical identifier for the run.
 
        :returns: :class:`RunOutput`
        """
@@ -5195,9 +5417,8 @@ class JobsAPI:
        :param name: str (optional)
          A filter on the list based on the exact (case insensitive) job name.
        :param offset: int (optional)
-         The offset of the first job to return, relative to the most recently created job.
-
-         Deprecated since June 2023. Use `page_token` to iterate through the pages instead.
+         The offset of the first job to return, relative to the most recently created job. Deprecated since
+         June 2023. Use `page_token` to iterate through the pages instead.
        :param page_token: str (optional)
          Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
          previous page of jobs respectively.
@@ -5231,7 +5452,7 @@ class JobsAPI:
                  limit: Optional[int] = None,
                  offset: Optional[int] = None,
                  page_token: Optional[str] = None,
-                 run_type: Optional[
+                 run_type: Optional[RunType] = None,
                  start_time_from: Optional[int] = None,
                  start_time_to: Optional[int] = None) -> Iterator[BaseRun]:
        """List job runs.
@@ -5253,13 +5474,12 @@ class JobsAPI:
          The number of runs to return. This value must be greater than 0 and less than 25. The default value
          is 20. If a request specifies a limit of 0, the service instead uses the maximum limit.
        :param offset: int (optional)
-         The offset of the first run to return, relative to the most recent run.
-
-         Deprecated since June 2023. Use `page_token` to iterate through the pages instead.
+         The offset of the first run to return, relative to the most recent run. Deprecated since June 2023.
+         Use `page_token` to iterate through the pages instead.
        :param page_token: str (optional)
          Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
          previous page of runs respectively.
-       :param run_type: :class:`
+       :param run_type: :class:`RunType` (optional)
          The type of runs to return. For a description of run types, see :method:jobs/getRun.
        :param start_time_from: int (optional)
          Show runs that started _at or after_ this value. The value must be a UTC timestamp in milliseconds.
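Both `list` and `list_runs` keep `offset` only as a deprecated escape hatch; the generated iterators already follow `page_token`/`next_page_token` internally. A minimal sketch with an illustrative job ID:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    # Pagination is handled by the iterator; no offset bookkeeping needed.
    for run in w.jobs.list_runs(job_id=123, run_type=jobs.RunType.JOB_RUN):
        print(run.run_id, run.state.life_cycle_state if run.state else None)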
@@ -5318,7 +5538,7 @@ class JobsAPI:
          The job run ID of the run to repair. The run must not be in progress.
        :param dbt_commands: List[str] (optional)
          An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-         deps", "dbt seed", "dbt run"]`
+         deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
        :param jar_params: List[str] (optional)
          A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
          The parameters are used to invoke the main function of the main class specified in the Spark JAR
@@ -5326,9 +5546,8 @@ class JobsAPI:
          in conjunction with notebook_params. The JSON representation of this field (for example
          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-         Use [task parameter variables] to set parameters containing information about job runs.
-
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
+         Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
+         information about job runs.
        :param job_parameters: Dict[str,str] (optional)
          Job-level parameters used in the run. for example `"param": "overriding_val"`
        :param latest_repair_id: int (optional)
@@ -5343,13 +5562,13 @@ class JobsAPI:
 
          notebook_params cannot be specified in conjunction with jar_params.
 
-         Use [task parameter variables] to set parameters containing information about job runs.
+         Use [Task parameter variables] to set parameters containing information about job runs.
 
          The JSON representation of this field (for example `{"notebook_params":{"name":"john
          doe","age":"35"}}`) cannot exceed 10,000 bytes.
 
+         [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
        :param pipeline_params: :class:`PipelineParams` (optional)
        :param python_named_params: Dict[str,str] (optional)
          A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
@@ -5360,7 +5579,7 @@ class JobsAPI:
          would overwrite the parameters specified in job setting. The JSON representation of this field (for
          example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-         Use [task parameter variables] to set parameters containing information about job runs.
+         Use [Task parameter variables] to set parameters containing information about job runs.
 
          Important
 
@@ -5368,7 +5587,7 @@ class JobsAPI:
          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
          emojis.
 
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
+         [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param rerun_all_failed_tasks: bool (optional)
          If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
        :param rerun_dependent_tasks: bool (optional)
@@ -5383,7 +5602,7 @@ class JobsAPI:
          in job setting. The JSON representation of this field (for example `{"python_params":["john
          doe","35"]}`) cannot exceed 10,000 bytes.
 
-         Use [task parameter variables] to set parameters containing information about job runs
+         Use [Task parameter variables] to set parameters containing information about job runs
 
          Important
 
@@ -5391,7 +5610,7 @@ class JobsAPI:
          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
          emojis.
 
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
+         [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param sql_params: Dict[str,str] (optional)
          A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
          "age": "35"}`. The SQL alert task does not support custom parameters.
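The `repair_run` hunks above are docstring-only; the call itself is unchanged. For reference, a minimal sketch that repairs just the failed tasks of a finished run and blocks on the waiter (the run ID is illustrative):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    repaired = w.jobs.repair_run(run_id=456,
                                 rerun_all_failed_tasks=True,
                                 rerun_dependent_tasks=True).result()
    print(repaired.state.result_state if repaired.state else None)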
@@ -5500,7 +5719,7 @@ class JobsAPI:
          The ID of the job to be executed
        :param dbt_commands: List[str] (optional)
          An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-         deps", "dbt seed", "dbt run"]`
+         deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
        :param idempotency_token: str (optional)
          An optional token to guarantee the idempotency of job run requests. If a run with the provided token
          already exists, the request does not create a new run but returns the ID of the existing run
@@ -5521,9 +5740,8 @@ class JobsAPI:
          in conjunction with notebook_params. The JSON representation of this field (for example
          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-         Use [task parameter variables] to set parameters containing information about job runs.
-
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
+         Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
+         information about job runs.
        :param job_parameters: Dict[str,str] (optional)
          Job-level parameters used in the run. for example `"param": "overriding_val"`
        :param notebook_params: Dict[str,str] (optional)
@@ -5535,13 +5753,13 @@ class JobsAPI:
 
          notebook_params cannot be specified in conjunction with jar_params.
 
-         Use [task parameter variables] to set parameters containing information about job runs.
+         Use [Task parameter variables] to set parameters containing information about job runs.
 
          The JSON representation of this field (for example `{"notebook_params":{"name":"john
          doe","age":"35"}}`) cannot exceed 10,000 bytes.
 
+         [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
        :param pipeline_params: :class:`PipelineParams` (optional)
        :param python_named_params: Dict[str,str] (optional)
          A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
@@ -5552,7 +5770,7 @@ class JobsAPI:
          would overwrite the parameters specified in job setting. The JSON representation of this field (for
          example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
 
-         Use [task parameter variables] to set parameters containing information about job runs.
+         Use [Task parameter variables] to set parameters containing information about job runs.
 
          Important
 
@@ -5560,7 +5778,7 @@ class JobsAPI:
          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
          emojis.
 
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
+         [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param queue: :class:`QueueSettings` (optional)
          The queue settings of the run.
        :param spark_submit_params: List[str] (optional)
@@ -5570,7 +5788,7 @@ class JobsAPI:
          in job setting. The JSON representation of this field (for example `{"python_params":["john
          doe","35"]}`) cannot exceed 10,000 bytes.
 
-         Use [task parameter variables] to set parameters containing information about job runs
+         Use [Task parameter variables] to set parameters containing information about job runs
 
          Important
 
@@ -5578,7 +5796,7 @@ class JobsAPI:
          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
          emojis.
 
-         [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
+         [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
        :param sql_params: Dict[str,str] (optional)
          A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
          "age": "35"}`. The SQL alert task does not support custom parameters.
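Likewise, the `run_now` changes are confined to its docstring. A minimal sketch of a triggered run with job-level parameters (ID and values illustrative):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    run = w.jobs.run_now(job_id=123,
                         job_parameters={'param': 'overriding_val'}).result()
    print(run.run_page_url)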
@@ -5661,13 +5879,23 @@ class JobsAPI:
    def submit(self,
               *,
               access_control_list: Optional[List[iam.AccessControlRequest]] = None,
+              condition_task: Optional[ConditionTask] = None,
+              dbt_task: Optional[DbtTask] = None,
               email_notifications: Optional[JobEmailNotifications] = None,
               git_source: Optional[GitSource] = None,
               health: Optional[JobsHealthRules] = None,
               idempotency_token: Optional[str] = None,
+              notebook_task: Optional[NotebookTask] = None,
               notification_settings: Optional[JobNotificationSettings] = None,
+              pipeline_task: Optional[PipelineTask] = None,
+              python_wheel_task: Optional[PythonWheelTask] = None,
               queue: Optional[QueueSettings] = None,
+              run_job_task: Optional[RunJobTask] = None,
               run_name: Optional[str] = None,
+              spark_jar_task: Optional[SparkJarTask] = None,
+              spark_python_task: Optional[SparkPythonTask] = None,
+              spark_submit_task: Optional[SparkSubmitTask] = None,
+              sql_task: Optional[SqlTask] = None,
               tasks: Optional[List[SubmitTask]] = None,
               timeout_seconds: Optional[int] = None,
               webhook_notifications: Optional[WebhookNotifications] = None) -> Wait[Run]:
@@ -5679,6 +5907,12 @@ class JobsAPI:
 
        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
          List of permissions to set on the job.
+       :param condition_task: :class:`ConditionTask` (optional)
+         If condition_task, specifies a condition with an outcome that can be used to control the execution
+         of other tasks. Does not require a cluster to execute and does not support retries or notifications.
+       :param dbt_task: :class:`DbtTask` (optional)
+         If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and the
+         ability to use a serverless or a pro SQL warehouse.
        :param email_notifications: :class:`JobEmailNotifications` (optional)
          An optional set of email addresses notified when the run begins or completes.
        :param git_source: :class:`GitSource` (optional)
@@ -5705,13 +5939,44 @@ class JobsAPI:
          For more information, see [How to ensure idempotency for jobs].
 
          [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
+       :param notebook_task: :class:`NotebookTask` (optional)
+         If notebook_task, indicates that this task must run a notebook. This field may not be specified in
+         conjunction with spark_jar_task.
        :param notification_settings: :class:`JobNotificationSettings` (optional)
          Optional notification settings that are used when sending notifications to each of the
          `email_notifications` and `webhook_notifications` for this run.
+       :param pipeline_task: :class:`PipelineTask` (optional)
+         If pipeline_task, indicates that this task must execute a Pipeline.
+       :param python_wheel_task: :class:`PythonWheelTask` (optional)
+         If python_wheel_task, indicates that this job must execute a PythonWheel.
        :param queue: :class:`QueueSettings` (optional)
          The queue settings of the one-time run.
+       :param run_job_task: :class:`RunJobTask` (optional)
+         If run_job_task, indicates that this task must execute another job.
        :param run_name: str (optional)
          An optional name for the run. The default value is `Untitled`.
+       :param spark_jar_task: :class:`SparkJarTask` (optional)
+         If spark_jar_task, indicates that this task must run a JAR.
+       :param spark_python_task: :class:`SparkPythonTask` (optional)
+         If spark_python_task, indicates that this task must run a Python file.
+       :param spark_submit_task: :class:`SparkSubmitTask` (optional)
+         If `spark_submit_task`, indicates that this task must be launched by the spark submit script. This
+         task can run only on new clusters.
+
+         In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
+         `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
+         configurations.
+
+         `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
+         _cannot_ specify them in parameters.
+
+         By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks
+         services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some
+         room for off-heap usage.
+
+         The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+       :param sql_task: :class:`SqlTask` (optional)
+         If sql_task, indicates that this job must execute a SQL task.
        :param tasks: List[:class:`SubmitTask`] (optional)
        :param timeout_seconds: int (optional)
          An optional timeout applied to each run of this job. A value of `0` means no timeout.
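`submit` gains top-level single-task fields (`condition_task` through `sql_task`) alongside the existing multi-task `tasks` list. A hedged sketch of the one-task shorthand; the notebook path is illustrative, and whether the backend requires cluster details next to the shorthand is not visible in this diff:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    run = w.jobs.submit(run_name='adhoc-notebook',
                        notebook_task=jobs.NotebookTask(notebook_path='/Workspace/adhoc')).result()

Multi-task submissions still go through tasks=[jobs.SubmitTask(...)] exactly as before.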
@@ -5725,13 +5990,23 @@ class JobsAPI:
        body = {}
        if access_control_list is not None:
            body['access_control_list'] = [v.as_dict() for v in access_control_list]
+       if condition_task is not None: body['condition_task'] = condition_task.as_dict()
+       if dbt_task is not None: body['dbt_task'] = dbt_task.as_dict()
        if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
        if git_source is not None: body['git_source'] = git_source.as_dict()
        if health is not None: body['health'] = health.as_dict()
        if idempotency_token is not None: body['idempotency_token'] = idempotency_token
+       if notebook_task is not None: body['notebook_task'] = notebook_task.as_dict()
        if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
+       if pipeline_task is not None: body['pipeline_task'] = pipeline_task.as_dict()
+       if python_wheel_task is not None: body['python_wheel_task'] = python_wheel_task.as_dict()
        if queue is not None: body['queue'] = queue.as_dict()
+       if run_job_task is not None: body['run_job_task'] = run_job_task.as_dict()
        if run_name is not None: body['run_name'] = run_name
+       if spark_jar_task is not None: body['spark_jar_task'] = spark_jar_task.as_dict()
+       if spark_python_task is not None: body['spark_python_task'] = spark_python_task.as_dict()
+       if spark_submit_task is not None: body['spark_submit_task'] = spark_submit_task.as_dict()
+       if sql_task is not None: body['sql_task'] = sql_task.as_dict()
        if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks]
        if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
        if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
@@ -5746,25 +6021,45 @@ class JobsAPI:
            self,
            *,
            access_control_list: Optional[List[iam.AccessControlRequest]] = None,
+           condition_task: Optional[ConditionTask] = None,
+           dbt_task: Optional[DbtTask] = None,
            email_notifications: Optional[JobEmailNotifications] = None,
            git_source: Optional[GitSource] = None,
            health: Optional[JobsHealthRules] = None,
            idempotency_token: Optional[str] = None,
+           notebook_task: Optional[NotebookTask] = None,
            notification_settings: Optional[JobNotificationSettings] = None,
+           pipeline_task: Optional[PipelineTask] = None,
+           python_wheel_task: Optional[PythonWheelTask] = None,
            queue: Optional[QueueSettings] = None,
+           run_job_task: Optional[RunJobTask] = None,
            run_name: Optional[str] = None,
+           spark_jar_task: Optional[SparkJarTask] = None,
+           spark_python_task: Optional[SparkPythonTask] = None,
+           spark_submit_task: Optional[SparkSubmitTask] = None,
+           sql_task: Optional[SqlTask] = None,
            tasks: Optional[List[SubmitTask]] = None,
            timeout_seconds: Optional[int] = None,
            webhook_notifications: Optional[WebhookNotifications] = None,
            timeout=timedelta(minutes=20)) -> Run:
        return self.submit(access_control_list=access_control_list,
+                          condition_task=condition_task,
+                          dbt_task=dbt_task,
                           email_notifications=email_notifications,
                           git_source=git_source,
                           health=health,
                           idempotency_token=idempotency_token,
+                          notebook_task=notebook_task,
                           notification_settings=notification_settings,
+                          pipeline_task=pipeline_task,
+                          python_wheel_task=python_wheel_task,
                           queue=queue,
+                          run_job_task=run_job_task,
                           run_name=run_name,
+                          spark_jar_task=spark_jar_task,
+                          spark_python_task=spark_python_task,
+                          spark_submit_task=spark_submit_task,
+                          sql_task=sql_task,
                           tasks=tasks,
                           timeout_seconds=timeout_seconds,
                           webhook_notifications=webhook_notifications).result(timeout=timeout)
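`submit_and_wait` simply forwards the same keywords and calls `.result()` on the waiter, so it is equivalent to `submit(...).result(timeout=...)`. A sketch with a custom wait budget (package name and entry point are illustrative):

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    run = w.jobs.submit_and_wait(
        run_name='wheel-smoke-test',
        python_wheel_task=jobs.PythonWheelTask(package_name='my_pkg', entry_point='main'),
        timeout=timedelta(minutes=45))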