databricks-sdk 0.19.1__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (35) hide show
  1. databricks/sdk/__init__.py +28 -6
  2. databricks/sdk/_widgets/__init__.py +2 -2
  3. databricks/sdk/config.py +3 -2
  4. databricks/sdk/core.py +4 -2
  5. databricks/sdk/mixins/workspace.py +2 -1
  6. databricks/sdk/oauth.py +1 -1
  7. databricks/sdk/runtime/__init__.py +85 -11
  8. databricks/sdk/runtime/dbutils_stub.py +1 -1
  9. databricks/sdk/service/_internal.py +1 -1
  10. databricks/sdk/service/billing.py +64 -1
  11. databricks/sdk/service/catalog.py +796 -84
  12. databricks/sdk/service/compute.py +391 -13
  13. databricks/sdk/service/dashboards.py +15 -0
  14. databricks/sdk/service/files.py +289 -15
  15. databricks/sdk/service/iam.py +214 -0
  16. databricks/sdk/service/jobs.py +242 -143
  17. databricks/sdk/service/ml.py +407 -0
  18. databricks/sdk/service/oauth2.py +83 -0
  19. databricks/sdk/service/pipelines.py +78 -8
  20. databricks/sdk/service/provisioning.py +108 -36
  21. databricks/sdk/service/serving.py +101 -35
  22. databricks/sdk/service/settings.py +1316 -186
  23. databricks/sdk/service/sharing.py +94 -18
  24. databricks/sdk/service/sql.py +230 -13
  25. databricks/sdk/service/vectorsearch.py +105 -60
  26. databricks/sdk/service/workspace.py +175 -1
  27. databricks/sdk/version.py +1 -1
  28. {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/METADATA +3 -1
  29. databricks_sdk-0.21.0.dist-info/RECORD +53 -0
  30. databricks/sdk/runtime/stub.py +0 -48
  31. databricks_sdk-0.19.1.dist-info/RECORD +0 -54
  32. {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/LICENSE +0 -0
  33. {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/NOTICE +0 -0
  34. {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/WHEEL +0 -0
  35. {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/top_level.txt +0 -0
@@ -170,9 +170,9 @@ class BaseRun:
170
170
  One time triggers that fire a single run. This occurs you triggered a single run on demand
171
171
  through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
172
172
  previously failed run. This occurs when you request to re-run the job in case of failures. *
173
- `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task.
174
-
175
- * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival."""
173
+ `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
174
+ Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
175
+ triggered by a table update."""
176
176
 
177
177
  trigger_info: Optional[TriggerInfo] = None
178
178
 
@@ -261,6 +261,20 @@ class CancelAllRuns:
261
261
  return cls(all_queued_runs=d.get('all_queued_runs', None), job_id=d.get('job_id', None))
262
262
 
263
263
 
264
+ @dataclass
265
+ class CancelAllRunsResponse:
266
+
267
+ def as_dict(self) -> dict:
268
+ """Serializes the CancelAllRunsResponse into a dictionary suitable for use as a JSON request body."""
269
+ body = {}
270
+ return body
271
+
272
+ @classmethod
273
+ def from_dict(cls, d: Dict[str, any]) -> CancelAllRunsResponse:
274
+ """Deserializes the CancelAllRunsResponse from a dictionary."""
275
+ return cls()
276
+
277
+
264
278
  @dataclass
265
279
  class CancelRun:
266
280
  run_id: int
@@ -278,6 +292,20 @@ class CancelRun:
278
292
  return cls(run_id=d.get('run_id', None))
279
293
 
280
294
 
295
+ @dataclass
296
+ class CancelRunResponse:
297
+
298
+ def as_dict(self) -> dict:
299
+ """Serializes the CancelRunResponse into a dictionary suitable for use as a JSON request body."""
300
+ body = {}
301
+ return body
302
+
303
+ @classmethod
304
+ def from_dict(cls, d: Dict[str, any]) -> CancelRunResponse:
305
+ """Deserializes the CancelRunResponse from a dictionary."""
306
+ return cls()
307
+
308
+
281
309
  @dataclass
282
310
  class ClusterInstance:
283
311
  cluster_id: Optional[str] = None
@@ -340,6 +368,12 @@ class ClusterSpec:
340
368
  new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
341
369
 
342
370
 
371
+ class Condition(Enum):
372
+
373
+ ALL_UPDATED = 'ALL_UPDATED'
374
+ ANY_UPDATED = 'ANY_UPDATED'
375
+
376
+
343
377
  @dataclass
344
378
  class ConditionTask:
345
379
  left: Optional[str] = None
@@ -512,9 +546,9 @@ class CreateJob:
512
546
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
513
547
 
514
548
  trigger: Optional[TriggerSettings] = None
515
- """Trigger settings for the job. Can be used to trigger a run when new files arrive in an external
516
- location. The default behavior is that the job runs only when triggered by clicking “Run
517
- Now” in the Jobs UI or sending an API request to `runNow`."""
549
+ """A configuration to trigger a run when certain conditions are met. The default behavior is that
550
+ the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API
551
+ request to `runNow`."""
518
552
 
519
553
  webhook_notifications: Optional[WebhookNotifications] = None
520
554
  """A collection of system notification IDs to notify when runs of this job begin or complete."""
@@ -738,6 +772,20 @@ class DeleteJob:
738
772
  return cls(job_id=d.get('job_id', None))
739
773
 
740
774
 
775
+ @dataclass
776
+ class DeleteResponse:
777
+
778
+ def as_dict(self) -> dict:
779
+ """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
780
+ body = {}
781
+ return body
782
+
783
+ @classmethod
784
+ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
785
+ """Deserializes the DeleteResponse from a dictionary."""
786
+ return cls()
787
+
788
+
741
789
  @dataclass
742
790
  class DeleteRun:
743
791
  run_id: int
@@ -755,6 +803,20 @@ class DeleteRun:
755
803
  return cls(run_id=d.get('run_id', None))
756
804
 
757
805
 
806
+ @dataclass
807
+ class DeleteRunResponse:
808
+
809
+ def as_dict(self) -> dict:
810
+ """Serializes the DeleteRunResponse into a dictionary suitable for use as a JSON request body."""
811
+ body = {}
812
+ return body
813
+
814
+ @classmethod
815
+ def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse:
816
+ """Deserializes the DeleteRunResponse from a dictionary."""
817
+ return cls()
818
+
819
+
758
820
  @dataclass
759
821
  class ExportRunOutput:
760
822
  views: Optional[List[ViewItem]] = None
@@ -838,7 +900,7 @@ class ForEachTask:
838
900
  task: Task
839
901
 
840
902
  concurrency: Optional[int] = None
841
- """Controls the number of active iterations task runs. Default is 100 (maximal value)."""
903
+ """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
842
904
 
843
905
  def as_dict(self) -> dict:
844
906
  """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
@@ -1061,9 +1123,6 @@ class Job:
1061
1123
  """Settings for this job and all of its runs. These settings can be updated using the `resetJob`
1062
1124
  method."""
1063
1125
 
1064
- trigger_history: Optional[TriggerHistory] = None
1065
- """History of the file arrival trigger associated with the job."""
1066
-
1067
1126
  def as_dict(self) -> dict:
1068
1127
  """Serializes the Job into a dictionary suitable for use as a JSON request body."""
1069
1128
  body = {}
@@ -1072,7 +1131,6 @@ class Job:
1072
1131
  if self.job_id is not None: body['job_id'] = self.job_id
1073
1132
  if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
1074
1133
  if self.settings: body['settings'] = self.settings.as_dict()
1075
- if self.trigger_history: body['trigger_history'] = self.trigger_history.as_dict()
1076
1134
  return body
1077
1135
 
1078
1136
  @classmethod
@@ -1082,8 +1140,7 @@ class Job:
1082
1140
  creator_user_name=d.get('creator_user_name', None),
1083
1141
  job_id=d.get('job_id', None),
1084
1142
  run_as_user_name=d.get('run_as_user_name', None),
1085
- settings=_from_dict(d, 'settings', JobSettings),
1086
- trigger_history=_from_dict(d, 'trigger_history', TriggerHistory))
1143
+ settings=_from_dict(d, 'settings', JobSettings))
1087
1144
 
1088
1145
 
1089
1146
  @dataclass
@@ -1591,9 +1648,9 @@ class JobSettings:
1591
1648
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
1592
1649
 
1593
1650
  trigger: Optional[TriggerSettings] = None
1594
- """Trigger settings for the job. Can be used to trigger a run when new files arrive in an external
1595
- location. The default behavior is that the job runs only when triggered by clicking “Run
1596
- Now” in the Jobs UI or sending an API request to `runNow`."""
1651
+ """A configuration to trigger a run when certain conditions are met. The default behavior is that
1652
+ the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API
1653
+ request to `runNow`."""
1597
1654
 
1598
1655
  webhook_notifications: Optional[WebhookNotifications] = None
1599
1656
  """A collection of system notification IDs to notify when runs of this job begin or complete."""
@@ -1776,13 +1833,13 @@ class ListJobsResponse:
1776
1833
  """If true, additional jobs matching the provided filter are available for listing."""
1777
1834
 
1778
1835
  jobs: Optional[List[BaseJob]] = None
1779
- """The list of jobs."""
1836
+ """The list of jobs. Only included in the response if there are jobs to list."""
1780
1837
 
1781
1838
  next_page_token: Optional[str] = None
1782
- """A token that can be used to list the next page of jobs."""
1839
+ """A token that can be used to list the next page of jobs (if applicable)."""
1783
1840
 
1784
1841
  prev_page_token: Optional[str] = None
1785
- """A token that can be used to list the previous page of jobs."""
1842
+ """A token that can be used to list the previous page of jobs (if applicable)."""
1786
1843
 
1787
1844
  def as_dict(self) -> dict:
1788
1845
  """Serializes the ListJobsResponse into a dictionary suitable for use as a JSON request body."""
@@ -1808,13 +1865,14 @@ class ListRunsResponse:
1808
1865
  """If true, additional runs matching the provided filter are available for listing."""
1809
1866
 
1810
1867
  next_page_token: Optional[str] = None
1811
- """A token that can be used to list the next page of runs."""
1868
+ """A token that can be used to list the next page of runs (if applicable)."""
1812
1869
 
1813
1870
  prev_page_token: Optional[str] = None
1814
- """A token that can be used to list the previous page of runs."""
1871
+ """A token that can be used to list the previous page of runs (if applicable)."""
1815
1872
 
1816
1873
  runs: Optional[List[BaseRun]] = None
1817
- """A list of runs, from most recently started to least."""
1874
+ """A list of runs, from most recently started to least. Only included in the response if there are
1875
+ runs to list."""
1818
1876
 
1819
1877
  def as_dict(self) -> dict:
1820
1878
  """Serializes the ListRunsResponse into a dictionary suitable for use as a JSON request body."""
@@ -1883,7 +1941,7 @@ class NotebookTask:
1883
1941
  :method:jobs/runNow with parameters specified, the two parameters maps are merged. If the same
1884
1942
  key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used.
1885
1943
 
1886
- Use [Task parameter variables] to set parameters containing information about job runs.
1944
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
1887
1945
 
1888
1946
  If the notebook takes a parameter that is not specified in the job’s `base_parameters` or the
1889
1947
  `run-now` override parameters, the default value from the notebook is used.
@@ -1892,8 +1950,8 @@ class NotebookTask:
1892
1950
 
1893
1951
  The JSON representation of this field cannot exceed 1MB.
1894
1952
 
1895
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
1896
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets"""
1953
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets
1954
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
1897
1955
 
1898
1956
  source: Optional[Source] = None
1899
1957
  """Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved
@@ -2084,8 +2142,9 @@ class RepairRun:
2084
2142
  be specified in conjunction with notebook_params. The JSON representation of this field (for
2085
2143
  example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
2086
2144
 
2087
- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
2088
- information about job runs."""
2145
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2146
+
2147
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2089
2148
 
2090
2149
  job_parameters: Optional[Dict[str, str]] = None
2091
2150
  """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2103,13 +2162,13 @@ class RepairRun:
2103
2162
 
2104
2163
  notebook_params cannot be specified in conjunction with jar_params.
2105
2164
 
2106
- Use [Task parameter variables] to set parameters containing information about job runs.
2165
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2107
2166
 
2108
2167
  The JSON representation of this field (for example `{"notebook_params":{"name":"john
2109
2168
  doe","age":"35"}}`) cannot exceed 10,000 bytes.
2110
2169
 
2111
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
2112
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
2170
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
2171
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2113
2172
 
2114
2173
  pipeline_params: Optional[PipelineParams] = None
2115
2174
 
@@ -2123,7 +2182,7 @@ class RepairRun:
2123
2182
  `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
2124
2183
  of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
2125
2184
 
2126
- Use [Task parameter variables] to set parameters containing information about job runs.
2185
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2127
2186
 
2128
2187
  Important
2129
2188
 
@@ -2131,7 +2190,7 @@ class RepairRun:
2131
2190
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
2132
2191
  emojis.
2133
2192
 
2134
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
2193
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2135
2194
 
2136
2195
  rerun_all_failed_tasks: Optional[bool] = None
2137
2196
  """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be
@@ -2151,7 +2210,7 @@ class RepairRun:
2151
2210
  parameters specified in job setting. The JSON representation of this field (for example
2152
2211
  `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
2153
2212
 
2154
- Use [Task parameter variables] to set parameters containing information about job runs
2213
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2155
2214
 
2156
2215
  Important
2157
2216
 
@@ -2159,7 +2218,7 @@ class RepairRun:
2159
2218
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
2160
2219
  emojis.
2161
2220
 
2162
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
2221
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2163
2222
 
2164
2223
  sql_params: Optional[Dict[str, str]] = None
2165
2224
  """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -2246,6 +2305,20 @@ class ResetJob:
2246
2305
  return cls(job_id=d.get('job_id', None), new_settings=_from_dict(d, 'new_settings', JobSettings))
2247
2306
 
2248
2307
 
2308
+ @dataclass
2309
+ class ResetResponse:
2310
+
2311
+ def as_dict(self) -> dict:
2312
+ """Serializes the ResetResponse into a dictionary suitable for use as a JSON request body."""
2313
+ body = {}
2314
+ return body
2315
+
2316
+ @classmethod
2317
+ def from_dict(cls, d: Dict[str, any]) -> ResetResponse:
2318
+ """Deserializes the ResetResponse from a dictionary."""
2319
+ return cls()
2320
+
2321
+
2249
2322
  @dataclass
2250
2323
  class ResolvedConditionTaskValues:
2251
2324
  left: Optional[str] = None
@@ -2538,9 +2611,9 @@ class Run:
2538
2611
  One time triggers that fire a single run. This occurs you triggered a single run on demand
2539
2612
  through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
2540
2613
  previously failed run. This occurs when you request to re-run the job in case of failures. *
2541
- `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task.
2542
-
2543
- * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival."""
2614
+ `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
2615
+ Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
2616
+ triggered by a table update."""
2544
2617
 
2545
2618
  trigger_info: Optional[TriggerInfo] = None
2546
2619
 
@@ -2656,7 +2729,7 @@ class RunConditionTaskOp(Enum):
2656
2729
  @dataclass
2657
2730
  class RunForEachTask:
2658
2731
  concurrency: Optional[int] = None
2659
- """Controls the number of active iterations task runs. Default is 100 (maximal value)."""
2732
+ """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
2660
2733
 
2661
2734
  inputs: Optional[str] = None
2662
2735
  """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
@@ -2793,8 +2866,9 @@ class RunNow:
2793
2866
  be specified in conjunction with notebook_params. The JSON representation of this field (for
2794
2867
  example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
2795
2868
 
2796
- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
2797
- information about job runs."""
2869
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2870
+
2871
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2798
2872
 
2799
2873
  job_parameters: Optional[Dict[str, str]] = None
2800
2874
  """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2808,13 +2882,13 @@ class RunNow:
2808
2882
 
2809
2883
  notebook_params cannot be specified in conjunction with jar_params.
2810
2884
 
2811
- Use [Task parameter variables] to set parameters containing information about job runs.
2885
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2812
2886
 
2813
2887
  The JSON representation of this field (for example `{"notebook_params":{"name":"john
2814
2888
  doe","age":"35"}}`) cannot exceed 10,000 bytes.
2815
2889
 
2816
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
2817
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
2890
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
2891
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2818
2892
 
2819
2893
  pipeline_params: Optional[PipelineParams] = None
2820
2894
 
@@ -2828,7 +2902,7 @@ class RunNow:
2828
2902
  `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
2829
2903
  of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
2830
2904
 
2831
- Use [Task parameter variables] to set parameters containing information about job runs.
2905
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2832
2906
 
2833
2907
  Important
2834
2908
 
@@ -2836,7 +2910,7 @@ class RunNow:
2836
2910
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
2837
2911
  emojis.
2838
2912
 
2839
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
2913
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2840
2914
 
2841
2915
  queue: Optional[QueueSettings] = None
2842
2916
  """The queue settings of the run."""
@@ -2848,7 +2922,7 @@ class RunNow:
2848
2922
  parameters specified in job setting. The JSON representation of this field (for example
2849
2923
  `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
2850
2924
 
2851
- Use [Task parameter variables] to set parameters containing information about job runs
2925
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
2852
2926
 
2853
2927
  Important
2854
2928
 
@@ -2856,7 +2930,7 @@ class RunNow:
2856
2930
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
2857
2931
  emojis.
2858
2932
 
2859
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
2933
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
2860
2934
 
2861
2935
  sql_params: Optional[Dict[str, str]] = None
2862
2936
  """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -2998,8 +3072,9 @@ class RunParameters:
2998
3072
  be specified in conjunction with notebook_params. The JSON representation of this field (for
2999
3073
  example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
3000
3074
 
3001
- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
3002
- information about job runs."""
3075
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3076
+
3077
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3003
3078
 
3004
3079
  job_parameters: Optional[Dict[str, str]] = None
3005
3080
  """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -3013,13 +3088,13 @@ class RunParameters:
3013
3088
 
3014
3089
  notebook_params cannot be specified in conjunction with jar_params.
3015
3090
 
3016
- Use [Task parameter variables] to set parameters containing information about job runs.
3091
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3017
3092
 
3018
3093
  The JSON representation of this field (for example `{"notebook_params":{"name":"john
3019
3094
  doe","age":"35"}}`) cannot exceed 10,000 bytes.
3020
3095
 
3021
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
3022
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
3096
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
3097
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3023
3098
 
3024
3099
  pipeline_params: Optional[PipelineParams] = None
3025
3100
 
@@ -3033,7 +3108,7 @@ class RunParameters:
3033
3108
  `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
3034
3109
  of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
3035
3110
 
3036
- Use [Task parameter variables] to set parameters containing information about job runs.
3111
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3037
3112
 
3038
3113
  Important
3039
3114
 
@@ -3041,7 +3116,7 @@ class RunParameters:
3041
3116
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
3042
3117
  emojis.
3043
3118
 
3044
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
3119
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3045
3120
 
3046
3121
  spark_submit_params: Optional[List[str]] = None
3047
3122
  """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
@@ -3050,7 +3125,7 @@ class RunParameters:
3050
3125
  parameters specified in job setting. The JSON representation of this field (for example
3051
3126
  `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
3052
3127
 
3053
- Use [Task parameter variables] to set parameters containing information about job runs
3128
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3054
3129
 
3055
3130
  Important
3056
3131
 
@@ -3058,7 +3133,7 @@ class RunParameters:
3058
3133
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
3059
3134
  emojis.
3060
3135
 
3061
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
3136
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3062
3137
 
3063
3138
  sql_params: Optional[Dict[str, str]] = None
3064
3139
  """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -3405,9 +3480,9 @@ class SparkJarTask:
3405
3480
  parameters: Optional[List[str]] = None
3406
3481
  """Parameters passed to the main method.
3407
3482
 
3408
- Use [Task parameter variables] to set parameters containing information about job runs.
3483
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3409
3484
 
3410
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
3485
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3411
3486
 
3412
3487
  def as_dict(self) -> dict:
3413
3488
  """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
@@ -3436,9 +3511,9 @@ class SparkPythonTask:
3436
3511
  parameters: Optional[List[str]] = None
3437
3512
  """Command line parameters passed to the Python file.
3438
3513
 
3439
- Use [Task parameter variables] to set parameters containing information about job runs.
3514
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3440
3515
 
3441
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
3516
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3442
3517
 
3443
3518
  source: Optional[Source] = None
3444
3519
  """Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file
@@ -3470,9 +3545,9 @@ class SparkSubmitTask:
3470
3545
  parameters: Optional[List[str]] = None
3471
3546
  """Command-line parameters passed to spark submit.
3472
3547
 
3473
- Use [Task parameter variables] to set parameters containing information about job runs.
3548
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
3474
3549
 
3475
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
3550
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""
3476
3551
 
3477
3552
  def as_dict(self) -> dict:
3478
3553
  """Serializes the SparkSubmitTask into a dictionary suitable for use as a JSON request body."""
@@ -4137,6 +4212,44 @@ class SubmitTask:
4137
4212
  webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
4138
4213
 
4139
4214
 
4215
+ @dataclass
4216
+ class TableTriggerConfiguration:
4217
+ condition: Optional[Condition] = None
4218
+ """The table(s) condition based on which to trigger a job run."""
4219
+
4220
+ min_time_between_triggers_seconds: Optional[int] = None
4221
+ """If set, the trigger starts a run only after the specified amount of time has passed since the
4222
+ last time the trigger fired. The minimum allowed value is 60 seconds."""
4223
+
4224
+ table_names: Optional[List[str]] = None
4225
+ """A list of Delta tables to monitor for changes. The table name must be in the format
4226
+ `catalog_name.schema_name.table_name`."""
4227
+
4228
+ wait_after_last_change_seconds: Optional[int] = None
4229
+ """If set, the trigger starts a run only after no table updates have occurred for the specified
4230
+ time and can be used to wait for a series of table updates before triggering a run. The minimum
4231
+ allowed value is 60 seconds."""
4232
+
4233
+ def as_dict(self) -> dict:
4234
+ """Serializes the TableTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
4235
+ body = {}
4236
+ if self.condition is not None: body['condition'] = self.condition.value
4237
+ if self.min_time_between_triggers_seconds is not None:
4238
+ body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
4239
+ if self.table_names: body['table_names'] = [v for v in self.table_names]
4240
+ if self.wait_after_last_change_seconds is not None:
4241
+ body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
4242
+ return body
4243
+
4244
+ @classmethod
4245
+ def from_dict(cls, d: Dict[str, any]) -> TableTriggerConfiguration:
4246
+ """Deserializes the TableTriggerConfiguration from a dictionary."""
4247
+ return cls(condition=_enum(d, 'condition', Condition),
4248
+ min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None),
4249
+ table_names=d.get('table_names', None),
4250
+ wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
4251
+
4252
+
4140
4253
  @dataclass
4141
4254
  class Task:
4142
4255
  task_key: str
@@ -4434,62 +4547,6 @@ class TaskNotificationSettings:
4434
4547
  no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))
4435
4548
 
4436
4549
 
4437
- @dataclass
4438
- class TriggerEvaluation:
4439
- description: Optional[str] = None
4440
- """Human-readable description of the the trigger evaluation result. Explains why the trigger
4441
- evaluation triggered or did not trigger a run, or failed."""
4442
-
4443
- run_id: Optional[int] = None
4444
- """The ID of the run that was triggered by the trigger evaluation. Only returned if a run was
4445
- triggered."""
4446
-
4447
- timestamp: Optional[int] = None
4448
- """Timestamp at which the trigger was evaluated."""
4449
-
4450
- def as_dict(self) -> dict:
4451
- """Serializes the TriggerEvaluation into a dictionary suitable for use as a JSON request body."""
4452
- body = {}
4453
- if self.description is not None: body['description'] = self.description
4454
- if self.run_id is not None: body['run_id'] = self.run_id
4455
- if self.timestamp is not None: body['timestamp'] = self.timestamp
4456
- return body
4457
-
4458
- @classmethod
4459
- def from_dict(cls, d: Dict[str, any]) -> TriggerEvaluation:
4460
- """Deserializes the TriggerEvaluation from a dictionary."""
4461
- return cls(description=d.get('description', None),
4462
- run_id=d.get('run_id', None),
4463
- timestamp=d.get('timestamp', None))
4464
-
4465
-
4466
- @dataclass
4467
- class TriggerHistory:
4468
- last_failed: Optional[TriggerEvaluation] = None
4469
- """The last time the trigger failed to evaluate."""
4470
-
4471
- last_not_triggered: Optional[TriggerEvaluation] = None
4472
- """The last time the trigger was evaluated but did not trigger a run."""
4473
-
4474
- last_triggered: Optional[TriggerEvaluation] = None
4475
- """The last time the run was triggered due to a file arrival."""
4476
-
4477
- def as_dict(self) -> dict:
4478
- """Serializes the TriggerHistory into a dictionary suitable for use as a JSON request body."""
4479
- body = {}
4480
- if self.last_failed: body['last_failed'] = self.last_failed.as_dict()
4481
- if self.last_not_triggered: body['last_not_triggered'] = self.last_not_triggered.as_dict()
4482
- if self.last_triggered: body['last_triggered'] = self.last_triggered.as_dict()
4483
- return body
4484
-
4485
- @classmethod
4486
- def from_dict(cls, d: Dict[str, any]) -> TriggerHistory:
4487
- """Deserializes the TriggerHistory from a dictionary."""
4488
- return cls(last_failed=_from_dict(d, 'last_failed', TriggerEvaluation),
4489
- last_not_triggered=_from_dict(d, 'last_not_triggered', TriggerEvaluation),
4490
- last_triggered=_from_dict(d, 'last_triggered', TriggerEvaluation))
4491
-
4492
-
4493
4550
  @dataclass
4494
4551
  class TriggerInfo:
4495
4552
  run_id: Optional[int] = None
@@ -4515,18 +4572,23 @@ class TriggerSettings:
4515
4572
  pause_status: Optional[PauseStatus] = None
4516
4573
  """Whether this trigger is paused or not."""
4517
4574
 
4575
+ table: Optional[TableTriggerConfiguration] = None
4576
+ """Table trigger settings."""
4577
+
4518
4578
  def as_dict(self) -> dict:
4519
4579
  """Serializes the TriggerSettings into a dictionary suitable for use as a JSON request body."""
4520
4580
  body = {}
4521
4581
  if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
4522
4582
  if self.pause_status is not None: body['pause_status'] = self.pause_status.value
4583
+ if self.table: body['table'] = self.table.as_dict()
4523
4584
  return body
4524
4585
 
4525
4586
  @classmethod
4526
4587
  def from_dict(cls, d: Dict[str, any]) -> TriggerSettings:
4527
4588
  """Deserializes the TriggerSettings from a dictionary."""
4528
4589
  return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration),
4529
- pause_status=_enum(d, 'pause_status', PauseStatus))
4590
+ pause_status=_enum(d, 'pause_status', PauseStatus),
4591
+ table=_from_dict(d, 'table', TableTriggerConfiguration))
4530
4592
 
4531
4593
 
4532
4594
  class TriggerType(Enum):
@@ -4536,15 +4598,16 @@ class TriggerType(Enum):
4536
4598
  One time triggers that fire a single run. This occurs when you trigger a single run on demand
4537
4599
  through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
4538
4600
  previously failed run. This occurs when you request to re-run the job in case of failures. *
4539
- `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task.
4540
-
4541
- * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival."""
4601
+ `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
4602
+ Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
4603
+ triggered by a table update."""
4542
4604
 
4543
4605
  FILE_ARRIVAL = 'FILE_ARRIVAL'
4544
4606
  ONE_TIME = 'ONE_TIME'
4545
4607
  PERIODIC = 'PERIODIC'
4546
4608
  RETRY = 'RETRY'
4547
4609
  RUN_JOB_TASK = 'RUN_JOB_TASK'
4610
+ TABLE = 'TABLE'
4548
4611
 
4549
4612
 
4550
4613
  @dataclass
@@ -4584,6 +4647,20 @@ class UpdateJob:
4584
4647
  new_settings=_from_dict(d, 'new_settings', JobSettings))
4585
4648
 
4586
4649
 
4650
+ @dataclass
4651
+ class UpdateResponse:
4652
+
4653
+ def as_dict(self) -> dict:
4654
+ """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
4655
+ body = {}
4656
+ return body
4657
+
4658
+ @classmethod
4659
+ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
4660
+ """Deserializes the UpdateResponse from a dictionary."""
4661
+ return cls()
4662
+
4663
+
4587
4664
  @dataclass
4588
4665
  class ViewItem:
4589
4666
  content: Optional[str] = None
@@ -4771,6 +4848,7 @@ class JobsAPI:
4771
4848
  if all_queued_runs is not None: body['all_queued_runs'] = all_queued_runs
4772
4849
  if job_id is not None: body['job_id'] = job_id
4773
4850
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
4851
+
4774
4852
  self._api.do('POST', '/api/2.1/jobs/runs/cancel-all', body=body, headers=headers)
4775
4853
 
4776
4854
  def cancel_run(self, run_id: int) -> Wait[Run]:
@@ -4789,8 +4867,11 @@ class JobsAPI:
4789
4867
  body = {}
4790
4868
  if run_id is not None: body['run_id'] = run_id
4791
4869
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
4792
- self._api.do('POST', '/api/2.1/jobs/runs/cancel', body=body, headers=headers)
4793
- return Wait(self.wait_get_run_job_terminated_or_skipped, run_id=run_id)
4870
+
4871
+ op_response = self._api.do('POST', '/api/2.1/jobs/runs/cancel', body=body, headers=headers)
4872
+ return Wait(self.wait_get_run_job_terminated_or_skipped,
4873
+ response=CancelRunResponse.from_dict(op_response),
4874
+ run_id=run_id)
4794
4875
 
4795
4876
  def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run:
4796
4877
  return self.cancel_run(run_id=run_id).result(timeout=timeout)
@@ -4901,9 +4982,9 @@ class JobsAPI:
4901
4982
  :param timeout_seconds: int (optional)
4902
4983
  An optional timeout applied to each run of this job. A value of `0` means no timeout.
4903
4984
  :param trigger: :class:`TriggerSettings` (optional)
4904
- Trigger settings for the job. Can be used to trigger a run when new files arrive in an external
4905
- location. The default behavior is that the job runs only when triggered by clicking “Run Now” in
4906
- the Jobs UI or sending an API request to `runNow`.
4985
+ A configuration to trigger a run when certain conditions are met. The default behavior is that the
4986
+ job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
4987
+ `runNow`.
4907
4988
  :param webhook_notifications: :class:`WebhookNotifications` (optional)
4908
4989
  A collection of system notification IDs to notify when runs of this job begin or complete.
4909
4990
 
@@ -4935,6 +5016,7 @@ class JobsAPI:
4935
5016
  if trigger is not None: body['trigger'] = trigger.as_dict()
4936
5017
  if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
4937
5018
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5019
+
4938
5020
  res = self._api.do('POST', '/api/2.1/jobs/create', body=body, headers=headers)
4939
5021
  return CreateResponse.from_dict(res)
4940
5022
 
@@ -4951,6 +5033,7 @@ class JobsAPI:
4951
5033
  body = {}
4952
5034
  if job_id is not None: body['job_id'] = job_id
4953
5035
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5036
+
4954
5037
  self._api.do('POST', '/api/2.1/jobs/delete', body=body, headers=headers)
4955
5038
 
4956
5039
  def delete_run(self, run_id: int):
@@ -4966,6 +5049,7 @@ class JobsAPI:
4966
5049
  body = {}
4967
5050
  if run_id is not None: body['run_id'] = run_id
4968
5051
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5052
+
4969
5053
  self._api.do('POST', '/api/2.1/jobs/runs/delete', body=body, headers=headers)
4970
5054
 
4971
5055
  def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput:
@@ -4985,6 +5069,7 @@ class JobsAPI:
4985
5069
  if run_id is not None: query['run_id'] = run_id
4986
5070
  if views_to_export is not None: query['views_to_export'] = views_to_export.value
4987
5071
  headers = {'Accept': 'application/json', }
5072
+
4988
5073
  res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
4989
5074
  return ExportRunOutput.from_dict(res)
4990
5075
 
@@ -5002,6 +5087,7 @@ class JobsAPI:
5002
5087
  query = {}
5003
5088
  if job_id is not None: query['job_id'] = job_id
5004
5089
  headers = {'Accept': 'application/json', }
5090
+
5005
5091
  res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
5006
5092
  return Job.from_dict(res)
5007
5093
 
@@ -5017,6 +5103,7 @@ class JobsAPI:
5017
5103
  """
5018
5104
 
5019
5105
  headers = {'Accept': 'application/json', }
5106
+
5020
5107
  res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}/permissionLevels', headers=headers)
5021
5108
  return GetJobPermissionLevelsResponse.from_dict(res)
5022
5109
 
@@ -5032,6 +5119,7 @@ class JobsAPI:
5032
5119
  """
5033
5120
 
5034
5121
  headers = {'Accept': 'application/json', }
5122
+
5035
5123
  res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}', headers=headers)
5036
5124
  return JobPermissions.from_dict(res)
5037
5125
 
@@ -5059,6 +5147,7 @@ class JobsAPI:
5059
5147
  if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values
5060
5148
  if run_id is not None: query['run_id'] = run_id
5061
5149
  headers = {'Accept': 'application/json', }
5150
+
5062
5151
  res = self._api.do('GET', '/api/2.1/jobs/runs/get', query=query, headers=headers)
5063
5152
  return Run.from_dict(res)
5064
5153
 
@@ -5083,6 +5172,7 @@ class JobsAPI:
5083
5172
  query = {}
5084
5173
  if run_id is not None: query['run_id'] = run_id
5085
5174
  headers = {'Accept': 'application/json', }
5175
+
5086
5176
  res = self._api.do('GET', '/api/2.1/jobs/runs/get-output', query=query, headers=headers)
5087
5177
  return RunOutput.from_dict(res)
5088
5178
 
@@ -5236,8 +5326,9 @@ class JobsAPI:
5236
5326
  in conjunction with notebook_params. The JSON representation of this field (for example
5237
5327
  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
5238
5328
 
5239
- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
5240
- information about job runs.
5329
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5330
+
5331
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5241
5332
  :param job_parameters: Dict[str,str] (optional)
5242
5333
  Job-level parameters used in the run. for example `"param": "overriding_val"`
5243
5334
  :param latest_repair_id: int (optional)
@@ -5252,13 +5343,13 @@ class JobsAPI:
5252
5343
 
5253
5344
  notebook_params cannot be specified in conjunction with jar_params.
5254
5345
 
5255
- Use [Task parameter variables] to set parameters containing information about job runs.
5346
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5256
5347
 
5257
5348
  The JSON representation of this field (for example `{"notebook_params":{"name":"john
5258
5349
  doe","age":"35"}}`) cannot exceed 10,000 bytes.
5259
5350
 
5260
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
5261
5351
  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
5352
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5262
5353
  :param pipeline_params: :class:`PipelineParams` (optional)
5263
5354
  :param python_named_params: Dict[str,str] (optional)
5264
5355
  A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
@@ -5269,7 +5360,7 @@ class JobsAPI:
5269
5360
  would overwrite the parameters specified in job setting. The JSON representation of this field (for
5270
5361
  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
5271
5362
 
5272
- Use [Task parameter variables] to set parameters containing information about job runs.
5363
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5273
5364
 
5274
5365
  Important
5275
5366
 
@@ -5277,7 +5368,7 @@ class JobsAPI:
5277
5368
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
5278
5369
  emojis.
5279
5370
 
5280
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
5371
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5281
5372
  :param rerun_all_failed_tasks: bool (optional)
5282
5373
  If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
5283
5374
  :param rerun_dependent_tasks: bool (optional)
@@ -5292,7 +5383,7 @@ class JobsAPI:
5292
5383
  in job setting. The JSON representation of this field (for example `{"python_params":["john
5293
5384
  doe","35"]}`) cannot exceed 10,000 bytes.
5294
5385
 
5295
- Use [Task parameter variables] to set parameters containing information about job runs
5386
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5296
5387
 
5297
5388
  Important
5298
5389
 
@@ -5300,7 +5391,7 @@ class JobsAPI:
5300
5391
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
5301
5392
  emojis.
5302
5393
 
5303
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
5394
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5304
5395
  :param sql_params: Dict[str,str] (optional)
5305
5396
  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
5306
5397
  "age": "35"}`. The SQL alert task does not support custom parameters.
@@ -5325,6 +5416,7 @@ class JobsAPI:
5325
5416
  if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params]
5326
5417
  if sql_params is not None: body['sql_params'] = sql_params
5327
5418
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5419
+
5328
5420
  op_response = self._api.do('POST', '/api/2.1/jobs/runs/repair', body=body, headers=headers)
5329
5421
  return Wait(self.wait_get_run_job_terminated_or_skipped,
5330
5422
  response=RepairRunResponse.from_dict(op_response),
@@ -5383,6 +5475,7 @@ class JobsAPI:
5383
5475
  if job_id is not None: body['job_id'] = job_id
5384
5476
  if new_settings is not None: body['new_settings'] = new_settings.as_dict()
5385
5477
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5478
+
5386
5479
  self._api.do('POST', '/api/2.1/jobs/reset', body=body, headers=headers)
5387
5480
 
5388
5481
  def run_now(self,
@@ -5428,8 +5521,9 @@ class JobsAPI:
5428
5521
  in conjunction with notebook_params. The JSON representation of this field (for example
5429
5522
  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
5430
5523
 
5431
- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
5432
- information about job runs.
5524
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5525
+
5526
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5433
5527
  :param job_parameters: Dict[str,str] (optional)
5434
5528
  Job-level parameters used in the run. for example `"param": "overriding_val"`
5435
5529
  :param notebook_params: Dict[str,str] (optional)
@@ -5441,13 +5535,13 @@ class JobsAPI:
5441
5535
 
5442
5536
  notebook_params cannot be specified in conjunction with jar_params.
5443
5537
 
5444
- Use [Task parameter variables] to set parameters containing information about job runs.
5538
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5445
5539
 
5446
5540
  The JSON representation of this field (for example `{"notebook_params":{"name":"john
5447
5541
  doe","age":"35"}}`) cannot exceed 10,000 bytes.
5448
5542
 
5449
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
5450
5543
  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
5544
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5451
5545
  :param pipeline_params: :class:`PipelineParams` (optional)
5452
5546
  :param python_named_params: Dict[str,str] (optional)
5453
5547
  A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
@@ -5458,7 +5552,7 @@ class JobsAPI:
5458
5552
  would overwrite the parameters specified in job setting. The JSON representation of this field (for
5459
5553
  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
5460
5554
 
5461
- Use [Task parameter variables] to set parameters containing information about job runs.
5555
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5462
5556
 
5463
5557
  Important
5464
5558
 
@@ -5466,7 +5560,7 @@ class JobsAPI:
5466
5560
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
5467
5561
  emojis.
5468
5562
 
5469
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
5563
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5470
5564
  :param queue: :class:`QueueSettings` (optional)
5471
5565
  The queue settings of the run.
5472
5566
  :param spark_submit_params: List[str] (optional)
@@ -5476,7 +5570,7 @@ class JobsAPI:
5476
5570
  in job setting. The JSON representation of this field (for example `{"python_params":["john
5477
5571
  doe","35"]}`) cannot exceed 10,000 bytes.
5478
5572
 
5479
- Use [Task parameter variables] to set parameters containing information about job runs
5573
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
5480
5574
 
5481
5575
  Important
5482
5576
 
@@ -5484,7 +5578,7 @@ class JobsAPI:
5484
5578
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
5485
5579
  emojis.
5486
5580
 
5487
- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
5581
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
5488
5582
  :param sql_params: Dict[str,str] (optional)
5489
5583
  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
5490
5584
  "age": "35"}`. The SQL alert task does not support custom parameters.
@@ -5507,6 +5601,7 @@ class JobsAPI:
5507
5601
  if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params]
5508
5602
  if sql_params is not None: body['sql_params'] = sql_params
5509
5603
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5604
+
5510
5605
  op_response = self._api.do('POST', '/api/2.1/jobs/run-now', body=body, headers=headers)
5511
5606
  return Wait(self.wait_get_run_job_terminated_or_skipped,
5512
5607
  response=RunNowResponse.from_dict(op_response),
@@ -5559,6 +5654,7 @@ class JobsAPI:
5559
5654
  if access_control_list is not None:
5560
5655
  body['access_control_list'] = [v.as_dict() for v in access_control_list]
5561
5656
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5657
+
5562
5658
  res = self._api.do('PUT', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
5563
5659
  return JobPermissions.from_dict(res)
5564
5660
 
@@ -5640,6 +5736,7 @@ class JobsAPI:
5640
5736
  if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
5641
5737
  if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
5642
5738
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5739
+
5643
5740
  op_response = self._api.do('POST', '/api/2.1/jobs/runs/submit', body=body, headers=headers)
5644
5741
  return Wait(self.wait_get_run_job_terminated_or_skipped,
5645
5742
  response=SubmitRunResponse.from_dict(op_response),
@@ -5706,6 +5803,7 @@ class JobsAPI:
5706
5803
  if job_id is not None: body['job_id'] = job_id
5707
5804
  if new_settings is not None: body['new_settings'] = new_settings.as_dict()
5708
5805
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5806
+
5709
5807
  self._api.do('POST', '/api/2.1/jobs/update', body=body, headers=headers)
5710
5808
 
5711
5809
  def update_permissions(
@@ -5727,5 +5825,6 @@ class JobsAPI:
5727
5825
  if access_control_list is not None:
5728
5826
  body['access_control_list'] = [v.as_dict() for v in access_control_list]
5729
5827
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
5828
+
5730
5829
  res = self._api.do('PATCH', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
5731
5830
  return JobPermissions.from_dict(res)