databricks-sdk 0.58.0__py3-none-any.whl → 0.59.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +13 -5
- databricks/sdk/service/aibuilder.py +0 -127
- databricks/sdk/service/apps.py +52 -46
- databricks/sdk/service/billing.py +9 -200
- databricks/sdk/service/catalog.py +5500 -7697
- databricks/sdk/service/cleanrooms.py +2 -32
- databricks/sdk/service/compute.py +456 -2515
- databricks/sdk/service/dashboards.py +1 -177
- databricks/sdk/service/database.py +18 -52
- databricks/sdk/service/files.py +2 -218
- databricks/sdk/service/iam.py +16 -295
- databricks/sdk/service/jobs.py +108 -1171
- databricks/sdk/service/marketplace.py +0 -573
- databricks/sdk/service/ml.py +76 -2445
- databricks/sdk/service/oauth2.py +122 -237
- databricks/sdk/service/pipelines.py +178 -752
- databricks/sdk/service/provisioning.py +0 -603
- databricks/sdk/service/serving.py +5 -577
- databricks/sdk/service/settings.py +191 -1560
- databricks/sdk/service/sharing.py +3 -469
- databricks/sdk/service/sql.py +117 -1704
- databricks/sdk/service/vectorsearch.py +0 -391
- databricks/sdk/service/workspace.py +250 -721
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.59.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.59.0.dist-info}/RECORD +30 -30
- {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.59.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.59.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.59.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.59.0.dist-info}/top_level.txt +0 -0
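
Note: most of the removed lines across the service modules above are request-wrapper dataclasses (e.g. CancelAllRuns, CreateJob, ResetJob in jobs.py below) rather than behavioral changes; the service methods take the same fields as plain keyword arguments, so typical call sites look the same in both versions. A minimal sketch, assuming configured workspace credentials and a placeholder job ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # picks up credentials from the environment or a config profile
# Cancel all active (and optionally queued) runs of one job. The wrapper
# dataclass for this request no longer ships in 0.59.0, but this call is
# unchanged between versions.
w.jobs.cancel_all_runs(job_id=123456789)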
databricks/sdk/service/jobs.py
CHANGED
@@ -432,39 +432,6 @@ class BaseRun:
         )
 
 
-@dataclass
-class CancelAllRuns:
-    all_queued_runs: Optional[bool] = None
-    """Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs
-    in the workspace are canceled."""
-
-    job_id: Optional[int] = None
-    """The canonical identifier of the job to cancel all runs of."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CancelAllRuns into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.all_queued_runs is not None:
-            body["all_queued_runs"] = self.all_queued_runs
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.all_queued_runs is not None:
-            body["all_queued_runs"] = self.all_queued_runs
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CancelAllRuns:
-        """Deserializes the CancelAllRuns from a dictionary."""
-        return cls(all_queued_runs=d.get("all_queued_runs", None), job_id=d.get("job_id", None))
-
-
 @dataclass
 class CancelAllRunsResponse:
     def as_dict(self) -> dict:
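
Note: every removed wrapper in this file follows the serialize/deserialize pattern visible above. A self-contained re-creation of that pattern for illustration (the class name is a hypothetical stand-in; the real CancelAllRuns is gone in 0.59.0):

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Dict, Optional


@dataclass
class CancelAllRunsRequest:  # hypothetical stand-in for the removed CancelAllRuns
    all_queued_runs: Optional[bool] = None
    job_id: Optional[int] = None

    def as_dict(self) -> dict:
        # Only fields that were actually set end up in the JSON request body.
        body: Dict[str, Any] = {}
        if self.all_queued_runs is not None:
            body["all_queued_runs"] = self.all_queued_runs
        if self.job_id is not None:
            body["job_id"] = self.job_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsRequest:
        return cls(all_queued_runs=d.get("all_queued_runs", None), job_id=d.get("job_id", None))


# Round trip: unset fields never appear in the serialized body.
assert CancelAllRunsRequest.from_dict({"job_id": 42}).as_dict() == {"job_id": 42}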
@@ -483,31 +450,6 @@ class CancelAllRunsResponse:
         return cls()
 
 
-@dataclass
-class CancelRun:
-    run_id: int
-    """This field is required."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CancelRun into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CancelRun into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CancelRun:
-        """Deserializes the CancelRun from a dictionary."""
-        return cls(run_id=d.get("run_id", None))
-
-
 @dataclass
 class CancelRunResponse:
     def as_dict(self) -> dict:
@@ -932,267 +874,7 @@ class Continuous:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Continuous:
         """Deserializes the Continuous from a dictionary."""
-        return cls(pause_status=_enum(d, "pause_status", PauseStatus))
-
-
-@dataclass
-class CreateJob:
-    access_control_list: Optional[List[JobAccessControlRequest]] = None
-    """List of permissions to set on the job."""
-
-    budget_policy_id: Optional[str] = None
-    """The id of the user specified budget policy to use for this job. If not specified, a default
-    budget policy may be applied when creating or modifying the job. See
-    `effective_budget_policy_id` for the budget policy used by this workload."""
-
-    continuous: Optional[Continuous] = None
-    """An optional continuous property for this job. The continuous property will ensure that there is
-    always one run executing. Only one of `schedule` and `continuous` can be used."""
-
-    deployment: Optional[JobDeployment] = None
-    """Deployment information for jobs managed by external sources."""
-
-    description: Optional[str] = None
-    """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding."""
-
-    edit_mode: Optional[JobEditMode] = None
-    """Edit mode of the job.
-
-    * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
-    in an editable state and can be modified."""
-
-    email_notifications: Optional[JobEmailNotifications] = None
-    """An optional set of email addresses that is notified when runs of this job begin or complete as
-    well as when this job is deleted."""
-
-    environments: Optional[List[JobEnvironment]] = None
-    """A list of task execution environment specifications that can be referenced by serverless tasks
-    of this job. An environment is required to be present for serverless tasks. For serverless
-    notebook tasks, the environment is accessible in the notebook environment panel. For other
-    serverless tasks, the task environment is required to be specified using environment_key in the
-    task settings."""
-
-    format: Optional[Format] = None
-    """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
-    When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`."""
-
-    git_source: Optional[GitSource] = None
-    """An optional specification for a remote Git repository containing the source code used by tasks.
-    Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-
-    If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-    However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-
-    Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
-    are used, `git_source` must be defined on the job."""
-
-    health: Optional[JobsHealthRules] = None
-
-    job_clusters: Optional[List[JobCluster]] = None
-    """A list of job cluster specifications that can be shared and reused by tasks of this job.
-    Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
-
-    max_concurrent_runs: Optional[int] = None
-    """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
-    be able to execute multiple runs of the same job concurrently. This is useful for example if you
-    trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
-    other, or if you want to trigger multiple runs which differ by their input parameters. This
-    setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4
-    concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.
-    However, from then on, new runs are skipped unless there are fewer than 3 active runs. This
-    value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped."""
-
-    name: Optional[str] = None
-    """An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding."""
-
-    notification_settings: Optional[JobNotificationSettings] = None
-    """Optional notification settings that are used when sending notifications to each of the
-    `email_notifications` and `webhook_notifications` for this job."""
-
-    parameters: Optional[List[JobParameterDefinition]] = None
-    """Job-level parameter definitions"""
-
-    performance_target: Optional[PerformanceTarget] = None
-    """The performance mode on a serverless job. This field determines the level of compute performance
-    or cost-efficiency for the run.
-
-    * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
-    `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
-    optimized cluster performance."""
-
-    queue: Optional[QueueSettings] = None
-    """The queue settings of the job."""
-
-    run_as: Optional[JobRunAs] = None
-
-    schedule: Optional[CronSchedule] = None
-    """An optional periodic schedule for this job. The default behavior is that the job only runs when
-    triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`."""
-
-    tags: Optional[Dict[str, str]] = None
-    """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for
-    jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can
-    be added to the job."""
-
-    tasks: Optional[List[Task]] = None
-    """A list of task specifications to be executed by this job. It supports up to 1000 elements in
-    write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update,
-    :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are
-    available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field
-    at the object root to determine if more results are available."""
-
-    timeout_seconds: Optional[int] = None
-    """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
-
-    trigger: Optional[TriggerSettings] = None
-    """A configuration to trigger a run when certain conditions are met. The default behavior is that
-    the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API
-    request to `runNow`."""
-
-    webhook_notifications: Optional[WebhookNotifications] = None
-    """A collection of system notification IDs to notify when runs of this job begin or complete."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateJob into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.continuous:
-            body["continuous"] = self.continuous.as_dict()
-        if self.deployment:
-            body["deployment"] = self.deployment.as_dict()
-        if self.description is not None:
-            body["description"] = self.description
-        if self.edit_mode is not None:
-            body["edit_mode"] = self.edit_mode.value
-        if self.email_notifications:
-            body["email_notifications"] = self.email_notifications.as_dict()
-        if self.environments:
-            body["environments"] = [v.as_dict() for v in self.environments]
-        if self.format is not None:
-            body["format"] = self.format.value
-        if self.git_source:
-            body["git_source"] = self.git_source.as_dict()
-        if self.health:
-            body["health"] = self.health.as_dict()
-        if self.job_clusters:
-            body["job_clusters"] = [v.as_dict() for v in self.job_clusters]
-        if self.max_concurrent_runs is not None:
-            body["max_concurrent_runs"] = self.max_concurrent_runs
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notification_settings:
-            body["notification_settings"] = self.notification_settings.as_dict()
-        if self.parameters:
-            body["parameters"] = [v.as_dict() for v in self.parameters]
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target.value
-        if self.queue:
-            body["queue"] = self.queue.as_dict()
-        if self.run_as:
-            body["run_as"] = self.run_as.as_dict()
-        if self.schedule:
-            body["schedule"] = self.schedule.as_dict()
-        if self.tags:
-            body["tags"] = self.tags
-        if self.tasks:
-            body["tasks"] = [v.as_dict() for v in self.tasks]
-        if self.timeout_seconds is not None:
-            body["timeout_seconds"] = self.timeout_seconds
-        if self.trigger:
-            body["trigger"] = self.trigger.as_dict()
-        if self.webhook_notifications:
-            body["webhook_notifications"] = self.webhook_notifications.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CreateJob into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.continuous:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment
-        if self.description is not None:
-            body["description"] = self.description
-        if self.edit_mode is not None:
-            body["edit_mode"] = self.edit_mode
-        if self.email_notifications:
-            body["email_notifications"] = self.email_notifications
-        if self.environments:
-            body["environments"] = self.environments
-        if self.format is not None:
-            body["format"] = self.format
-        if self.git_source:
-            body["git_source"] = self.git_source
-        if self.health:
-            body["health"] = self.health
-        if self.job_clusters:
-            body["job_clusters"] = self.job_clusters
-        if self.max_concurrent_runs is not None:
-            body["max_concurrent_runs"] = self.max_concurrent_runs
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notification_settings:
-            body["notification_settings"] = self.notification_settings
-        if self.parameters:
-            body["parameters"] = self.parameters
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target
-        if self.queue:
-            body["queue"] = self.queue
-        if self.run_as:
-            body["run_as"] = self.run_as
-        if self.schedule:
-            body["schedule"] = self.schedule
-        if self.tags:
-            body["tags"] = self.tags
-        if self.tasks:
-            body["tasks"] = self.tasks
-        if self.timeout_seconds is not None:
-            body["timeout_seconds"] = self.timeout_seconds
-        if self.trigger:
-            body["trigger"] = self.trigger
-        if self.webhook_notifications:
-            body["webhook_notifications"] = self.webhook_notifications
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CreateJob:
-        """Deserializes the CreateJob from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest),
-            budget_policy_id=d.get("budget_policy_id", None),
-            continuous=_from_dict(d, "continuous", Continuous),
-            deployment=_from_dict(d, "deployment", JobDeployment),
-            description=d.get("description", None),
-            edit_mode=_enum(d, "edit_mode", JobEditMode),
-            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
-            environments=_repeated_dict(d, "environments", JobEnvironment),
-            format=_enum(d, "format", Format),
-            git_source=_from_dict(d, "git_source", GitSource),
-            health=_from_dict(d, "health", JobsHealthRules),
-            job_clusters=_repeated_dict(d, "job_clusters", JobCluster),
-            max_concurrent_runs=d.get("max_concurrent_runs", None),
-            name=d.get("name", None),
-            notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings),
-            parameters=_repeated_dict(d, "parameters", JobParameterDefinition),
-            performance_target=_enum(d, "performance_target", PerformanceTarget),
-            queue=_from_dict(d, "queue", QueueSettings),
-            run_as=_from_dict(d, "run_as", JobRunAs),
-            schedule=_from_dict(d, "schedule", CronSchedule),
-            tags=d.get("tags", None),
-            tasks=_repeated_dict(d, "tasks", Task),
-            timeout_seconds=d.get("timeout_seconds", None),
-            trigger=_from_dict(d, "trigger", TriggerSettings),
-            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
-        )
+        return cls(pause_status=_enum(d, "pause_status", PauseStatus))
 
 
 @dataclass
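
Note: the removed CreateJob shows the deep-vs-shallow split used throughout these modules: as_dict recurses into nested objects (v.as_dict()) and unwraps enums (.value), while as_shallow_dict keeps the Python objects themselves. A minimal sketch of the distinction, using a hypothetical Example class:

from dataclasses import dataclass
from enum import Enum
from typing import Optional


class Format(Enum):
    MULTI_TASK = "MULTI_TASK"


@dataclass
class Example:  # hypothetical, for illustration only
    format: Optional[Format] = None

    def as_dict(self) -> dict:
        body = {}
        if self.format is not None:
            body["format"] = self.format.value  # JSON-ready string
        return body

    def as_shallow_dict(self) -> dict:
        body = {}
        if self.format is not None:
            body["format"] = self.format  # still the Enum member
        return body


e = Example(format=Format.MULTI_TASK)
assert e.as_dict() == {"format": "MULTI_TASK"}
assert e.as_shallow_dict() == {"format": Format.MULTI_TASK}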
@@ -1803,31 +1485,6 @@ class DbtTask:
         )
 
 
-@dataclass
-class DeleteJob:
-    job_id: int
-    """The canonical identifier of the job to delete. This field is required."""
-
-    def as_dict(self) -> dict:
-        """Serializes the DeleteJob into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeleteJob into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeleteJob:
-        """Deserializes the DeleteJob from a dictionary."""
-        return cls(job_id=d.get("job_id", None))
-
-
 @dataclass
 class DeleteResponse:
     def as_dict(self) -> dict:
@@ -1846,31 +1503,6 @@ class DeleteResponse:
         return cls()
 
 
-@dataclass
-class DeleteRun:
-    run_id: int
-    """ID of the run to delete."""
-
-    def as_dict(self) -> dict:
-        """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeleteRun:
-        """Deserializes the DeleteRun from a dictionary."""
-        return cls(run_id=d.get("run_id", None))
-
-
 @dataclass
 class DeleteRunResponse:
     def as_dict(self) -> dict:
@@ -1937,38 +1569,6 @@ class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
         )
 
 
-@dataclass
-class EnforcePolicyComplianceRequest:
-    job_id: int
-    """The ID of the job you want to enforce policy compliance on."""
-
-    validate_only: Optional[bool] = None
-    """If set, previews changes made to the job to comply with its policy, but does not update the job."""
-
-    def as_dict(self) -> dict:
-        """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceRequest:
-        """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
-        return cls(job_id=d.get("job_id", None), validate_only=d.get("validate_only", None))
-
-
 @dataclass
 class EnforcePolicyComplianceResponse:
     has_changes: Optional[bool] = None
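
Note: the corresponding service call survives the wrapper's removal. A sketch, assuming the workspace-level policy-compliance API and a placeholder job ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Preview (validate_only=True) what would change to bring the job's clusters
# in line with its policy, without actually updating the job.
resp = w.policy_compliance_for_jobs.enforce_compliance(job_id=123456789, validate_only=True)
print(resp.has_changes)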
@@ -3283,40 +2883,6 @@ class JobPermissionsDescription:
         )
 
 
-@dataclass
-class JobPermissionsRequest:
-    access_control_list: Optional[List[JobAccessControlRequest]] = None
-
-    job_id: Optional[str] = None
-    """The job for which to get or manage permissions."""
-
-    def as_dict(self) -> dict:
-        """Serializes the JobPermissionsRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsRequest:
-        """Deserializes the JobPermissionsRequest from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest),
-            job_id=d.get("job_id", None),
-        )
-
-
 @dataclass
 class JobRunAs:
     """Write-only setting. Specifies the user or service principal that the job runs as. If not
@@ -3443,6 +3009,9 @@ class JobSettings:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
+    """The user or service principal that the job runs as, if specified in the request. This field
+    indicates the explicit configuration of `run_as` for the job. To find the value in all cases,
+    explicit or implicit, use `run_as_user_name`."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
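
Note: the new docstring separates the explicit run_as configuration from the effective identity reported by run_as_user_name. A short sketch of setting it explicitly (the job name and service-principal ID are placeholders):

from databricks.sdk.service import jobs

settings = jobs.JobSettings(
    name="nightly-etl",
    run_as=jobs.JobRunAs(service_principal_name="00000000-0000-0000-0000-000000000000"),
)
# The explicit configuration survives serialization into the request body:
assert settings.as_dict()["run_as"] == {
    "service_principal_name": "00000000-0000-0000-0000-000000000000"
}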
@@ -4533,240 +4102,42 @@ class RepairHistoryItem:
             body["effective_performance_target"] = self.effective_performance_target
         if self.end_time is not None:
             body["end_time"] = self.end_time
-        if self.id is not None:
-            body["id"] = self.id
-        if self.start_time is not None:
-            body["start_time"] = self.start_time
-        if self.state:
-            body["state"] = self.state
-        if self.status:
-            body["status"] = self.status
-        if self.task_run_ids:
-            body["task_run_ids"] = self.task_run_ids
-        if self.type is not None:
-            body["type"] = self.type
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem:
-        """Deserializes the RepairHistoryItem from a dictionary."""
-        return cls(
-            effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
-            end_time=d.get("end_time", None),
-            id=d.get("id", None),
-            start_time=d.get("start_time", None),
-            state=_from_dict(d, "state", RunState),
-            status=_from_dict(d, "status", RunStatus),
-            task_run_ids=d.get("task_run_ids", None),
-            type=_enum(d, "type", RepairHistoryItemType),
-        )
-
-
-class RepairHistoryItemType(Enum):
-    """The repair history item type. Indicates whether a run is the original run or a repair run."""
-
-    ORIGINAL = "ORIGINAL"
-    REPAIR = "REPAIR"
-
-
-@dataclass
-class RepairRun:
-    run_id: int
-    """The job run ID of the run to repair. The run must not be in progress."""
-
-    dbt_commands: Optional[List[str]] = None
-    """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-    deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
-
-    jar_params: Optional[List[str]] = None
-    """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
-    "35"]`. The parameters are used to invoke the main function of the main class specified in the
-    Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot
-    be specified in conjunction with notebook_params. The JSON representation of this field (for
-    example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-
-    Use [Task parameter variables] to set parameters containing information about job runs.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    job_parameters: Optional[Dict[str, str]] = None
-    """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
-
-    latest_repair_id: Optional[int] = None
-    """The ID of the latest repair. This parameter is not required when repairing a run for the first
-    time, but must be provided on subsequent requests to repair the same run."""
-
-    notebook_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
-    "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
-    [dbutils.widgets.get] function.
-
-    If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-
-    notebook_params cannot be specified in conjunction with jar_params.
-
-    Use [Task parameter variables] to set parameters containing information about job runs.
-
-    The JSON representation of this field (for example `{"notebook_params":{"name":"john
-    doe","age":"35"}}`) cannot exceed 10,000 bytes.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
-
-    performance_target: Optional[PerformanceTarget] = None
-    """The performance mode on a serverless job. The performance target determines the level of compute
-    performance or cost-efficiency for the run. This field overrides the performance target defined
-    on the job level.
-
-    * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
-    `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
-    optimized cluster performance."""
-
-    pipeline_params: Optional[PipelineParams] = None
-    """Controls whether the pipeline should perform a full refresh"""
-
-    python_named_params: Optional[Dict[str, str]] = None
-
-    python_params: Optional[List[str]] = None
-    """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
-    "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon
-    `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
-    of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-
-    Use [Task parameter variables] to set parameters containing information about job runs.
-
-    Important
-
-    These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-    returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-    emojis.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    rerun_all_failed_tasks: Optional[bool] = None
-    """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be
-    used."""
-
-    rerun_dependent_tasks: Optional[bool] = None
-    """If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were
-    previously successful. Can be also used in combination with `rerun_all_failed_tasks`."""
-
-    rerun_tasks: Optional[List[str]] = None
-    """The task keys of the task runs to repair."""
-
-    spark_submit_params: Optional[List[str]] = None
-    """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
-    ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit
-    script as command-line parameters. If specified upon `run-now`, it would overwrite the
-    parameters specified in job setting. The JSON representation of this field (for example
-    `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-
-    Use [Task parameter variables] to set parameters containing information about job runs
-
-    Important
-
-    These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-    returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-    emojis.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    sql_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
-    doe", "age": "35"}`. The SQL alert task does not support custom parameters."""
-
-    def as_dict(self) -> dict:
-        """Serializes the RepairRun into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.dbt_commands:
-            body["dbt_commands"] = [v for v in self.dbt_commands]
-        if self.jar_params:
-            body["jar_params"] = [v for v in self.jar_params]
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.latest_repair_id is not None:
-            body["latest_repair_id"] = self.latest_repair_id
-        if self.notebook_params:
-            body["notebook_params"] = self.notebook_params
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target.value
-        if self.pipeline_params:
-            body["pipeline_params"] = self.pipeline_params.as_dict()
-        if self.python_named_params:
-            body["python_named_params"] = self.python_named_params
-        if self.python_params:
-            body["python_params"] = [v for v in self.python_params]
-        if self.rerun_all_failed_tasks is not None:
-            body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks
-        if self.rerun_dependent_tasks is not None:
-            body["rerun_dependent_tasks"] = self.rerun_dependent_tasks
-        if self.rerun_tasks:
-            body["rerun_tasks"] = [v for v in self.rerun_tasks]
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.spark_submit_params:
-            body["spark_submit_params"] = [v for v in self.spark_submit_params]
-        if self.sql_params:
-            body["sql_params"] = self.sql_params
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the RepairRun into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.dbt_commands:
-            body["dbt_commands"] = self.dbt_commands
-        if self.jar_params:
-            body["jar_params"] = self.jar_params
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.latest_repair_id is not None:
-            body["latest_repair_id"] = self.latest_repair_id
-        if self.notebook_params:
-            body["notebook_params"] = self.notebook_params
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target
-        if self.pipeline_params:
-            body["pipeline_params"] = self.pipeline_params
-        if self.python_named_params:
-            body["python_named_params"] = self.python_named_params
-        if self.python_params:
-            body["python_params"] = self.python_params
-        if self.rerun_all_failed_tasks is not None:
-            body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks
-        if self.rerun_dependent_tasks is not None:
-            body["rerun_dependent_tasks"] = self.rerun_dependent_tasks
-        if self.rerun_tasks:
-            body["rerun_tasks"] = self.rerun_tasks
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.spark_submit_params:
-            body["spark_submit_params"] = self.spark_submit_params
-        if self.sql_params:
-            body["sql_params"] = self.sql_params
+        if self.id is not None:
+            body["id"] = self.id
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state
+        if self.status:
+            body["status"] = self.status
+        if self.task_run_ids:
+            body["task_run_ids"] = self.task_run_ids
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> RepairRun:
-        """Deserializes the RepairRun from a dictionary."""
+    def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem:
+        """Deserializes the RepairHistoryItem from a dictionary."""
         return cls(
-            dbt_commands=d.get("dbt_commands", None),
-            jar_params=d.get("jar_params", None),
-            job_parameters=d.get("job_parameters", None),
-            latest_repair_id=d.get("latest_repair_id", None),
-            notebook_params=d.get("notebook_params", None),
-            performance_target=_enum(d, "performance_target", PerformanceTarget),
-            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
-            python_named_params=d.get("python_named_params", None),
-            python_params=d.get("python_params", None),
-            rerun_all_failed_tasks=d.get("rerun_all_failed_tasks", None),
-            rerun_dependent_tasks=d.get("rerun_dependent_tasks", None),
-            rerun_tasks=d.get("rerun_tasks", None),
-            run_id=d.get("run_id", None),
-            spark_submit_params=d.get("spark_submit_params", None),
-            sql_params=d.get("sql_params", None),
+            effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
+            end_time=d.get("end_time", None),
+            id=d.get("id", None),
+            start_time=d.get("start_time", None),
+            state=_from_dict(d, "state", RunState),
+            status=_from_dict(d, "status", RunStatus),
+            task_run_ids=d.get("task_run_ids", None),
+            type=_enum(d, "type", RepairHistoryItemType),
         )
 
 
+class RepairHistoryItemType(Enum):
+    """The repair history item type. Indicates whether a run is the original run or a repair run."""
+
+    ORIGINAL = "ORIGINAL"
+    REPAIR = "REPAIR"
+
+
 @dataclass
 class RepairRunResponse:
     """Run repair was initiated."""
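
Note: the from_dict methods above rely on helpers such as _enum from the SDK's internal module. A simplified re-creation of the idea (the real helper lives in an internal module and is more lenient about unknown values):

from enum import Enum
from typing import Any, Dict, Optional, Type, TypeVar

E = TypeVar("E", bound=Enum)


class RepairHistoryItemType(Enum):
    ORIGINAL = "ORIGINAL"
    REPAIR = "REPAIR"


def _enum(d: Dict[str, Any], field: str, cls: Type[E]) -> Optional[E]:
    # Map a raw JSON string onto the enum member with that value.
    raw = d.get(field)
    if raw is None:
        return None
    return cls(raw)  # simplified: the SDK's real helper tolerates unknown values


assert _enum({"type": "REPAIR"}, "type", RepairHistoryItemType) is RepairHistoryItemType.REPAIR
assert _enum({}, "type", RepairHistoryItemType) is None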
@@ -4795,41 +4166,6 @@ class RepairRunResponse:
         return cls(repair_id=d.get("repair_id", None))
 
 
-@dataclass
-class ResetJob:
-    job_id: int
-    """The canonical identifier of the job to reset. This field is required."""
-
-    new_settings: JobSettings
-    """The new settings of the job. These settings completely replace the old settings.
-
-    Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to
-    other fields are applied to future runs only."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ResetJob into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.new_settings:
-            body["new_settings"] = self.new_settings.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ResetJob into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.new_settings:
-            body["new_settings"] = self.new_settings
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> ResetJob:
-        """Deserializes the ResetJob from a dictionary."""
-        return cls(job_id=d.get("job_id", None), new_settings=_from_dict(d, "new_settings", JobSettings))
-
-
 @dataclass
 class ResetResponse:
     def as_dict(self) -> dict:
@@ -5587,249 +4923,50 @@ class RunIf(Enum):
     `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the
     dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been
     completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: ALl
-    dependencies have failed"""
-
-    ALL_DONE = "ALL_DONE"
-    ALL_FAILED = "ALL_FAILED"
-    ALL_SUCCESS = "ALL_SUCCESS"
-    AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED"
-    AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS"
-    NONE_FAILED = "NONE_FAILED"
-
-
-@dataclass
-class RunJobOutput:
-    run_id: Optional[int] = None
-    """The run id of the triggered job run"""
-
-    def as_dict(self) -> dict:
-        """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput:
-        """Deserializes the RunJobOutput from a dictionary."""
-        return cls(run_id=d.get("run_id", None))
-
-
-@dataclass
-class RunJobTask:
-    job_id: int
-    """ID of the job to trigger."""
-
-    dbt_commands: Optional[List[str]] = None
-    """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-    deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
-
-    jar_params: Optional[List[str]] = None
-    """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
-    "35"]`. The parameters are used to invoke the main function of the main class specified in the
-    Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot
-    be specified in conjunction with notebook_params. The JSON representation of this field (for
-    example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-
-    Use [Task parameter variables] to set parameters containing information about job runs.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    job_parameters: Optional[Dict[str, str]] = None
-    """Job-level parameters used to trigger the job."""
-
-    notebook_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
-    "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
-    [dbutils.widgets.get] function.
-
-    If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-
-    notebook_params cannot be specified in conjunction with jar_params.
-
-    Use [Task parameter variables] to set parameters containing information about job runs.
-
-    The JSON representation of this field (for example `{"notebook_params":{"name":"john
-    doe","age":"35"}}`) cannot exceed 10,000 bytes.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-    [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
-
-    pipeline_params: Optional[PipelineParams] = None
-    """Controls whether the pipeline should perform a full refresh"""
-
-    python_named_params: Optional[Dict[str, str]] = None
-
-    python_params: Optional[List[str]] = None
-    """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
-    "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon
-    `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
-    of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-
-    Use [Task parameter variables] to set parameters containing information about job runs.
-
-    Important
-
-    These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-    returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-    emojis.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    spark_submit_params: Optional[List[str]] = None
-    """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
-    ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit
-    script as command-line parameters. If specified upon `run-now`, it would overwrite the
-    parameters specified in job setting. The JSON representation of this field (for example
-    `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-
-    Use [Task parameter variables] to set parameters containing information about job runs
-
-    Important
-
-    These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-    returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-    emojis.
-
-    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    sql_params: Optional[Dict[str, str]] = None
-    """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
-    doe", "age": "35"}`. The SQL alert task does not support custom parameters."""
-
-    def as_dict(self) -> dict:
-        """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.dbt_commands:
-            body["dbt_commands"] = [v for v in self.dbt_commands]
-        if self.jar_params:
-            body["jar_params"] = [v for v in self.jar_params]
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.notebook_params:
-            body["notebook_params"] = self.notebook_params
-        if self.pipeline_params:
-            body["pipeline_params"] = self.pipeline_params.as_dict()
-        if self.python_named_params:
-            body["python_named_params"] = self.python_named_params
-        if self.python_params:
-            body["python_params"] = [v for v in self.python_params]
-        if self.spark_submit_params:
-            body["spark_submit_params"] = [v for v in self.spark_submit_params]
-        if self.sql_params:
-            body["sql_params"] = self.sql_params
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the RunJobTask into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.dbt_commands:
-            body["dbt_commands"] = self.dbt_commands
-        if self.jar_params:
-            body["jar_params"] = self.jar_params
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.notebook_params:
-            body["notebook_params"] = self.notebook_params
-        if self.pipeline_params:
-            body["pipeline_params"] = self.pipeline_params
-        if self.python_named_params:
-            body["python_named_params"] = self.python_named_params
-        if self.python_params:
-            body["python_params"] = self.python_params
-        if self.spark_submit_params:
-            body["spark_submit_params"] = self.spark_submit_params
-        if self.sql_params:
-            body["sql_params"] = self.sql_params
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> RunJobTask:
-        """Deserializes the RunJobTask from a dictionary."""
-        return cls(
-            dbt_commands=d.get("dbt_commands", None),
-            jar_params=d.get("jar_params", None),
-            job_id=d.get("job_id", None),
-            job_parameters=d.get("job_parameters", None),
-            notebook_params=d.get("notebook_params", None),
-            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
-            python_named_params=d.get("python_named_params", None),
-            python_params=d.get("python_params", None),
-            spark_submit_params=d.get("spark_submit_params", None),
-            sql_params=d.get("sql_params", None),
-        )
-
-
-class RunLifeCycleState(Enum):
-    """A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is
-    queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context
-    are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The
-    task of this run has completed, and the cluster and execution context are being cleaned up. *
-    `TERMINATED`: The task of this run has completed, and the cluster and execution context have
-    been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous
-    run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An
-    exceptional state that indicates a failure in the Jobs service, such as network failure over a
-    long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service
-    terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is
-    blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry."""
+    dependencies have failed"""
 
-    BLOCKED = "BLOCKED"
-    INTERNAL_ERROR = "INTERNAL_ERROR"
-    PENDING = "PENDING"
-    QUEUED = "QUEUED"
-    RUNNING = "RUNNING"
-    SKIPPED = "SKIPPED"
-    TERMINATED = "TERMINATED"
-    TERMINATING = "TERMINATING"
-    WAITING_FOR_RETRY = "WAITING_FOR_RETRY"
+    ALL_DONE = "ALL_DONE"
+    ALL_FAILED = "ALL_FAILED"
+    ALL_SUCCESS = "ALL_SUCCESS"
+    AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED"
+    AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS"
+    NONE_FAILED = "NONE_FAILED"
 
 
-
-
+@dataclass
+class RunJobOutput:
+    run_id: Optional[int] = None
+    """The run id of the triggered job run"""
 
-
-
-
-
-
-
-
+    def as_dict(self) -> dict:
+        """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput:
+        """Deserializes the RunJobOutput from a dictionary."""
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
-class RunNow:
+class RunJobTask:
     job_id: int
-    """The ID of the job to be executed"""
+    """ID of the job to trigger."""
 
     dbt_commands: Optional[List[str]] = None
     """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
    deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
 
-    idempotency_token: Optional[str] = None
-    """An optional token to guarantee the idempotency of job run requests. If a run with the provided
-    token already exists, the request does not create a new run but returns the ID of the existing
-    run instead. If a run with the provided token is deleted, an error is returned.
-
-    If you specify the idempotency token, upon failure you can retry until the request succeeds.
-    Databricks guarantees that exactly one run is launched with that idempotency token.
-
-    This token must have at most 64 characters.
-
-    For more information, see [How to ensure idempotency for jobs].
-
-    [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html"""
-
     jar_params: Optional[List[str]] = None
     """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
     "35"]`. The parameters are used to invoke the main function of the main class specified in the
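
Note: RunJobOutput and RunJobTask survive the restructuring above. A round-trip sketch, assuming 0.59.0 is installed (the run ID is a placeholder):

from databricks.sdk.service.jobs import RunJobOutput

out = RunJobOutput.from_dict({"run_id": 987654321})
assert out.run_id == 987654321
assert out.as_dict() == {"run_id": 987654321}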
@@ -5842,7 +4979,7 @@ class RunNow:
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
-    """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
+    """Job-level parameters used to trigger the job."""
 
     notebook_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -5861,19 +4998,6 @@ class RunNow:
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""

-    only: Optional[List[str]] = None
-    """A list of task keys to run inside of the job. If this field is not provided, all tasks in the
-    job will be run."""
-
-    performance_target: Optional[PerformanceTarget] = None
-    """The performance mode on a serverless job. The performance target determines the level of compute
-    performance or cost-efficiency for the run. This field overrides the performance target defined
-    on the job level.
-
-    * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
-    `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
-    optimized cluster performance."""
-
     pipeline_params: Optional[PipelineParams] = None
     """Controls whether the pipeline should perform a full refresh"""

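After this trim, `RunJobTask` keeps only per-invocation parameters; run-level knobs such as `idempotency_token`, `queue`, and `performance_target` are configured elsewhere. A small sketch with a hypothetical job id:

    task = RunJobTask(
        job_id=123,  # hypothetical job id
        job_parameters={"env": "staging"},
    )
    assert task.as_dict() == {"job_id": 123, "job_parameters": {"env": "staging"}}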
@@ -5895,9 +5019,6 @@ class RunNow:

     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""

-    queue: Optional[QueueSettings] = None
-    """The queue settings of the run."""
-
     spark_submit_params: Optional[List[str]] = None
     """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
     ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit
@@ -5920,12 +5041,10 @@ class RunNow:
     doe", "age": "35"}`. The SQL alert task does not support custom parameters."""

     def as_dict(self) -> dict:
-        """Serializes the RunNow into a dictionary suitable for use as a JSON request body."""
+        """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.dbt_commands:
             body["dbt_commands"] = [v for v in self.dbt_commands]
-        if self.idempotency_token is not None:
-            body["idempotency_token"] = self.idempotency_token
         if self.jar_params:
             body["jar_params"] = [v for v in self.jar_params]
         if self.job_id is not None:
@@ -5934,18 +5053,12 @@ class RunNow:
             body["job_parameters"] = self.job_parameters
         if self.notebook_params:
             body["notebook_params"] = self.notebook_params
-        if self.only:
-            body["only"] = [v for v in self.only]
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target.value
         if self.pipeline_params:
             body["pipeline_params"] = self.pipeline_params.as_dict()
         if self.python_named_params:
             body["python_named_params"] = self.python_named_params
         if self.python_params:
             body["python_params"] = [v for v in self.python_params]
-        if self.queue:
-            body["queue"] = self.queue.as_dict()
         if self.spark_submit_params:
             body["spark_submit_params"] = [v for v in self.spark_submit_params]
         if self.sql_params:
@@ -5953,12 +5066,10 @@ class RunNow:
         return body

     def as_shallow_dict(self) -> dict:
-        """Serializes the RunNow into a shallow dictionary of its immediate attributes."""
+        """Serializes the RunJobTask into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.dbt_commands:
             body["dbt_commands"] = self.dbt_commands
-        if self.idempotency_token is not None:
-            body["idempotency_token"] = self.idempotency_token
         if self.jar_params:
             body["jar_params"] = self.jar_params
         if self.job_id is not None:
@@ -5967,18 +5078,12 @@ class RunNow:
             body["job_parameters"] = self.job_parameters
         if self.notebook_params:
             body["notebook_params"] = self.notebook_params
-        if self.only:
-            body["only"] = self.only
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target
         if self.pipeline_params:
             body["pipeline_params"] = self.pipeline_params
         if self.python_named_params:
             body["python_named_params"] = self.python_named_params
         if self.python_params:
             body["python_params"] = self.python_params
-        if self.queue:
-            body["queue"] = self.queue
         if self.spark_submit_params:
             body["spark_submit_params"] = self.spark_submit_params
         if self.sql_params:
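The two serializers differ only in depth: `as_dict` recurses into nested message types while `as_shallow_dict` leaves them as objects. A sketch, assuming `PipelineParams` takes a `full_refresh` flag as elsewhere in this module:

    t = RunJobTask(job_id=1, pipeline_params=PipelineParams(full_refresh=True))
    assert t.as_dict()["pipeline_params"] == {"full_refresh": True}
    assert isinstance(t.as_shallow_dict()["pipeline_params"], PipelineParams)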
@@ -5986,26 +5091,58 @@ class RunNow:
         return body

     @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> RunNow:
-        """Deserializes the RunNow from a dictionary."""
+    def from_dict(cls, d: Dict[str, Any]) -> RunJobTask:
+        """Deserializes the RunJobTask from a dictionary."""
         return cls(
             dbt_commands=d.get("dbt_commands", None),
-            idempotency_token=d.get("idempotency_token", None),
             jar_params=d.get("jar_params", None),
             job_id=d.get("job_id", None),
             job_parameters=d.get("job_parameters", None),
             notebook_params=d.get("notebook_params", None),
-            only=d.get("only", None),
-            performance_target=_enum(d, "performance_target", PerformanceTarget),
             pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
             python_named_params=d.get("python_named_params", None),
             python_params=d.get("python_params", None),
-            queue=_from_dict(d, "queue", QueueSettings),
             spark_submit_params=d.get("spark_submit_params", None),
             sql_params=d.get("sql_params", None),
         )


+class RunLifeCycleState(Enum):
+    """A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is
+    queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context
+    are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The
+    task of this run has completed, and the cluster and execution context are being cleaned up. *
+    `TERMINATED`: The task of this run has completed, and the cluster and execution context have
+    been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous
+    run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An
+    exceptional state that indicates a failure in the Jobs service, such as network failure over a
+    long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service
+    terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is
+    blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry."""
+
+    BLOCKED = "BLOCKED"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    PENDING = "PENDING"
+    QUEUED = "QUEUED"
+    RUNNING = "RUNNING"
+    SKIPPED = "SKIPPED"
+    TERMINATED = "TERMINATED"
+    TERMINATING = "TERMINATING"
+    WAITING_FOR_RETRY = "WAITING_FOR_RETRY"
+
+
+class RunLifecycleStateV2State(Enum):
+    """The current state of the run."""
+
+    BLOCKED = "BLOCKED"
+    PENDING = "PENDING"
+    QUEUED = "QUEUED"
+    RUNNING = "RUNNING"
+    TERMINATED = "TERMINATED"
+    TERMINATING = "TERMINATING"
+    WAITING = "WAITING"
+
+
 @dataclass
 class RunNowResponse:
     """Run was started successfully."""
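These lifecycle enums are what hand-rolled polling code inspects; the SDK's generated waiters do the same internally. A hedged sketch, assuming a workspace client and a made-up run id (`TERMINATED`, `SKIPPED`, and `INTERNAL_ERROR` are the terminal states per the docstring above):

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    run_id = 42  # hypothetical run id

    terminal = (
        jobs.RunLifeCycleState.TERMINATED,
        jobs.RunLifeCycleState.SKIPPED,
        jobs.RunLifeCycleState.INTERNAL_ERROR,
    )
    while w.jobs.get_run(run_id=run_id).state.life_cycle_state not in terminal:
        time.sleep(30)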
@@ -7690,156 +6827,6 @@ class StorageMode(Enum):
     IMPORT = "IMPORT"


-@dataclass
-class SubmitRun:
-    access_control_list: Optional[List[JobAccessControlRequest]] = None
-    """List of permissions to set on the job."""
-
-    budget_policy_id: Optional[str] = None
-    """The user specified id of the budget policy to use for this one-time run. If not specified, the
-    run will be not be attributed to any budget policy."""
-
-    email_notifications: Optional[JobEmailNotifications] = None
-    """An optional set of email addresses notified when the run begins or completes."""
-
-    environments: Optional[List[JobEnvironment]] = None
-    """A list of task execution environment specifications that can be referenced by tasks of this run."""
-
-    git_source: Optional[GitSource] = None
-    """An optional specification for a remote Git repository containing the source code used by tasks.
-    Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-
-    If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-    However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-
-    Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
-    are used, `git_source` must be defined on the job."""
-
-    health: Optional[JobsHealthRules] = None
-
-    idempotency_token: Optional[str] = None
-    """An optional token that can be used to guarantee the idempotency of job run requests. If a run
-    with the provided token already exists, the request does not create a new run but returns the ID
-    of the existing run instead. If a run with the provided token is deleted, an error is returned.
-
-    If you specify the idempotency token, upon failure you can retry until the request succeeds.
-    Databricks guarantees that exactly one run is launched with that idempotency token.
-
-    This token must have at most 64 characters.
-
-    For more information, see [How to ensure idempotency for jobs].
-
-    [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html"""
-
-    notification_settings: Optional[JobNotificationSettings] = None
-    """Optional notification settings that are used when sending notifications to each of the
-    `email_notifications` and `webhook_notifications` for this run."""
-
-    queue: Optional[QueueSettings] = None
-    """The queue settings of the one-time run."""
-
-    run_as: Optional[JobRunAs] = None
-    """Specifies the user or service principal that the job runs as. If not specified, the job runs as
-    the user who submits the request."""
-
-    run_name: Optional[str] = None
-    """An optional name for the run. The default value is `Untitled`."""
-
-    tasks: Optional[List[SubmitTask]] = None
-
-    timeout_seconds: Optional[int] = None
-    """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
-
-    webhook_notifications: Optional[WebhookNotifications] = None
-    """A collection of system notification IDs to notify when the run begins or completes."""
-
-    def as_dict(self) -> dict:
-        """Serializes the SubmitRun into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.email_notifications:
-            body["email_notifications"] = self.email_notifications.as_dict()
-        if self.environments:
-            body["environments"] = [v.as_dict() for v in self.environments]
-        if self.git_source:
-            body["git_source"] = self.git_source.as_dict()
-        if self.health:
-            body["health"] = self.health.as_dict()
-        if self.idempotency_token is not None:
-            body["idempotency_token"] = self.idempotency_token
-        if self.notification_settings:
-            body["notification_settings"] = self.notification_settings.as_dict()
-        if self.queue:
-            body["queue"] = self.queue.as_dict()
-        if self.run_as:
-            body["run_as"] = self.run_as.as_dict()
-        if self.run_name is not None:
-            body["run_name"] = self.run_name
-        if self.tasks:
-            body["tasks"] = [v.as_dict() for v in self.tasks]
-        if self.timeout_seconds is not None:
-            body["timeout_seconds"] = self.timeout_seconds
-        if self.webhook_notifications:
-            body["webhook_notifications"] = self.webhook_notifications.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the SubmitRun into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.email_notifications:
-            body["email_notifications"] = self.email_notifications
-        if self.environments:
-            body["environments"] = self.environments
-        if self.git_source:
-            body["git_source"] = self.git_source
-        if self.health:
-            body["health"] = self.health
-        if self.idempotency_token is not None:
-            body["idempotency_token"] = self.idempotency_token
-        if self.notification_settings:
-            body["notification_settings"] = self.notification_settings
-        if self.queue:
-            body["queue"] = self.queue
-        if self.run_as:
-            body["run_as"] = self.run_as
-        if self.run_name is not None:
-            body["run_name"] = self.run_name
-        if self.tasks:
-            body["tasks"] = self.tasks
-        if self.timeout_seconds is not None:
-            body["timeout_seconds"] = self.timeout_seconds
-        if self.webhook_notifications:
-            body["webhook_notifications"] = self.webhook_notifications
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> SubmitRun:
-        """Deserializes the SubmitRun from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest),
-            budget_policy_id=d.get("budget_policy_id", None),
-            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
-            environments=_repeated_dict(d, "environments", JobEnvironment),
-            git_source=_from_dict(d, "git_source", GitSource),
-            health=_from_dict(d, "health", JobsHealthRules),
-            idempotency_token=d.get("idempotency_token", None),
-            notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings),
-            queue=_from_dict(d, "queue", QueueSettings),
-            run_as=_from_dict(d, "run_as", JobRunAs),
-            run_name=d.get("run_name", None),
-            tasks=_repeated_dict(d, "tasks", SubmitTask),
-            timeout_seconds=d.get("timeout_seconds", None),
-            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
-        )
-
-
 @dataclass
 class SubmitRunResponse:
     """Run was created and started successfully."""
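With the `SubmitRun` request wrapper gone, its fields survive as keyword arguments on `JobsAPI.submit`. A sketch of a one-time run, assuming serverless compute and a hypothetical notebook path; `.result()` blocks until the run reaches a terminal state:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    run = w.jobs.submit(
        run_name="one-time-demo",  # hypothetical run name
        tasks=[
            jobs.SubmitTask(
                task_key="main",
                notebook_task=jobs.NotebookTask(notebook_path="/Workspace/demo/main"),
            )
        ],
    ).result()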
@@ -9049,59 +8036,6 @@ class TriggerType(Enum):
     TABLE = "TABLE"


-@dataclass
-class UpdateJob:
-    job_id: int
-    """The canonical identifier of the job to update. This field is required."""
-
-    fields_to_remove: Optional[List[str]] = None
-    """Remove top-level fields in the job settings. Removing nested fields is not supported, except for
-    tasks and job clusters (`tasks/task_1`). This field is optional."""
-
-    new_settings: Optional[JobSettings] = None
-    """The new settings for the job.
-
-    Top-level fields specified in `new_settings` are completely replaced, except for arrays which
-    are merged. That is, new and existing entries are completely replaced based on the respective
-    key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept.
-
-    Partially updating nested fields is not supported.
-
-    Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other
-    fields are applied to future runs only."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateJob into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.fields_to_remove:
-            body["fields_to_remove"] = [v for v in self.fields_to_remove]
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.new_settings:
-            body["new_settings"] = self.new_settings.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the UpdateJob into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.fields_to_remove:
-            body["fields_to_remove"] = self.fields_to_remove
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.new_settings:
-            body["new_settings"] = self.new_settings
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> UpdateJob:
-        """Deserializes the UpdateJob from a dictionary."""
-        return cls(
-            fields_to_remove=d.get("fields_to_remove", None),
-            job_id=d.get("job_id", None),
-            new_settings=_from_dict(d, "new_settings", JobSettings),
-        )
-
-
 @dataclass
 class UpdateResponse:
     def as_dict(self) -> dict:
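`UpdateJob` was likewise only the request envelope; the partial-update semantics in its removed docstring still describe `JobsAPI.update`. A sketch with hypothetical values:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Replace one top-level setting and drop another; everything else is kept.
    w.jobs.update(
        job_id=123,  # hypothetical job id
        new_settings=jobs.JobSettings(timeout_seconds=3600),
        fields_to_remove=["schedule"],
    )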
@@ -9499,6 +8433,9 @@ class JobsAPI:
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
+          The user or service principal that the job runs as, if specified in the request. This field
+          indicates the explicit configuration of `run_as` for the job. To find the value in all cases,
+          explicit or implicit, use `run_as_user_name`.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
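A sketch of setting `run_as` explicitly at job creation, with a placeholder service principal id; reading back `run_as_user_name` shows the effective identity whether it was set explicitly or inherited:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    created = w.jobs.create(
        name="nightly-etl",  # hypothetical job name
        run_as=jobs.JobRunAs(
            service_principal_name="00000000-0000-0000-0000-000000000000"  # placeholder id
        ),
        tasks=[
            jobs.Task(
                task_key="etl",
                notebook_task=jobs.NotebookTask(notebook_path="/Workspace/demo/etl"),
            )
        ],
    )
    print(w.jobs.get(job_id=created.job_id).run_as_user_name)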