databricks-sdk 0.19.0__py3-none-any.whl → 0.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -170,9 +170,9 @@ class BaseRun:
  One time triggers that fire a single run. This occurs you triggered a single run on demand
  through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
  previously failed run. This occurs when you request to re-run the job in case of failures. *
- `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task.
-
- * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival."""
+ `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
+ Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
+ triggered by a table update."""

  trigger_info: Optional[TriggerInfo] = None

@@ -340,6 +340,12 @@ class ClusterSpec:
  new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))


+ class Condition(Enum):
+
+ ALL_UPDATED = 'ALL_UPDATED'
+ ANY_UPDATED = 'ANY_UPDATED'
+
+
  @dataclass
  class ConditionTask:
  left: Optional[str] = None
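The new `Condition` enum feeds the table trigger introduced later in this diff. A minimal sketch of the two values, assuming the usual `databricks.sdk.service.jobs` import path:

from databricks.sdk.service import jobs

# ANY_UPDATED fires when any monitored table changes; ALL_UPDATED waits
# until every monitored table has been updated.
condition = jobs.Condition.ANY_UPDATED
print(condition.value)  # 'ANY_UPDATED'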
@@ -512,9 +518,9 @@ class CreateJob:
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""

  trigger: Optional[TriggerSettings] = None
- """Trigger settings for the job. Can be used to trigger a run when new files arrive in an external
- location. The default behavior is that the job runs only when triggered by clicking “Run
- Now” in the Jobs UI or sending an API request to `runNow`."""
+ """A configuration to trigger a run when certain conditions are met. The default behavior is that
+ the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API
+ request to `runNow`."""

  webhook_notifications: Optional[WebhookNotifications] = None
  """A collection of system notification IDs to notify when runs of this job begin or complete."""
@@ -838,7 +844,7 @@ class ForEachTask:
  task: Task

  concurrency: Optional[int] = None
- """Controls the number of active iterations task runs. Default is 100 (maximal value)."""
+ """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""

  def as_dict(self) -> dict:
  """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
@@ -1061,9 +1067,6 @@ class Job:
  """Settings for this job and all of its runs. These settings can be updated using the `resetJob`
  method."""

- trigger_history: Optional[TriggerHistory] = None
- """History of the file arrival trigger associated with the job."""
-
  def as_dict(self) -> dict:
  """Serializes the Job into a dictionary suitable for use as a JSON request body."""
  body = {}
@@ -1072,7 +1075,6 @@ class Job:
  if self.job_id is not None: body['job_id'] = self.job_id
  if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
  if self.settings: body['settings'] = self.settings.as_dict()
- if self.trigger_history: body['trigger_history'] = self.trigger_history.as_dict()
  return body

  @classmethod
@@ -1082,8 +1084,7 @@ class Job:
  creator_user_name=d.get('creator_user_name', None),
  job_id=d.get('job_id', None),
  run_as_user_name=d.get('run_as_user_name', None),
- settings=_from_dict(d, 'settings', JobSettings),
- trigger_history=_from_dict(d, 'trigger_history', TriggerHistory))
+ settings=_from_dict(d, 'settings', JobSettings))


  @dataclass
@@ -1591,9 +1592,9 @@ class JobSettings:
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""

  trigger: Optional[TriggerSettings] = None
- """Trigger settings for the job. Can be used to trigger a run when new files arrive in an external
- location. The default behavior is that the job runs only when triggered by clicking “Run
- Now” in the Jobs UI or sending an API request to `runNow`."""
+ """A configuration to trigger a run when certain conditions are met. The default behavior is that
+ the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API
+ request to `runNow`."""

  webhook_notifications: Optional[WebhookNotifications] = None
  """A collection of system notification IDs to notify when runs of this job begin or complete."""
@@ -1883,7 +1884,7 @@ class NotebookTask:
  :method:jobs/runNow with parameters specified, the two parameters maps are merged. If the same
  key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  If the notebook takes a parameter that is not specified in the job’s `base_parameters` or the
  `run-now` override parameters, the default value from the notebook is used.
@@ -1892,8 +1893,8 @@ class NotebookTask:

  The JSON representation of this field cannot exceed 1MB.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets"""
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  source: Optional[Source] = None
  """Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved
@@ -2084,8 +2085,9 @@ class RepairRun:
  be specified in conjunction with notebook_params. The JSON representation of this field (for
  example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
- information about job runs."""
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  job_parameters: Optional[Dict[str, str]] = None
  """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2103,13 +2105,13 @@ class RepairRun:

  notebook_params cannot be specified in conjunction with jar_params.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  The JSON representation of this field (for example `{"notebook_params":{"name":"john
  doe","age":"35"}}`) cannot exceed 10,000 bytes.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  pipeline_params: Optional[PipelineParams] = None

@@ -2123,7 +2125,7 @@ class RepairRun:
  `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
  of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -2131,7 +2133,7 @@ class RepairRun:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  rerun_all_failed_tasks: Optional[bool] = None
  """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be
@@ -2151,7 +2153,7 @@ class RepairRun:
  parameters specified in job setting. The JSON representation of this field (for example
  `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -2159,7 +2161,7 @@ class RepairRun:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  sql_params: Optional[Dict[str, str]] = None
  """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -2538,9 +2540,9 @@ class Run:
  One time triggers that fire a single run. This occurs you triggered a single run on demand
  through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
  previously failed run. This occurs when you request to re-run the job in case of failures. *
- `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task.
-
- * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival."""
+ `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
+ Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
+ triggered by a table update."""

  trigger_info: Optional[TriggerInfo] = None

@@ -2656,7 +2658,7 @@ class RunConditionTaskOp(Enum):
  @dataclass
  class RunForEachTask:
  concurrency: Optional[int] = None
- """Controls the number of active iterations task runs. Default is 100 (maximal value)."""
+ """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""

  inputs: Optional[str] = None
  """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
@@ -2793,8 +2795,9 @@ class RunNow:
  be specified in conjunction with notebook_params. The JSON representation of this field (for
  example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
- information about job runs."""
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  job_parameters: Optional[Dict[str, str]] = None
  """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2808,13 +2811,13 @@ class RunNow:

  notebook_params cannot be specified in conjunction with jar_params.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  The JSON representation of this field (for example `{"notebook_params":{"name":"john
  doe","age":"35"}}`) cannot exceed 10,000 bytes.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  pipeline_params: Optional[PipelineParams] = None

@@ -2828,7 +2831,7 @@ class RunNow:
  `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
  of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -2836,7 +2839,7 @@ class RunNow:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  queue: Optional[QueueSettings] = None
  """The queue settings of the run."""
@@ -2848,7 +2851,7 @@ class RunNow:
  parameters specified in job setting. The JSON representation of this field (for example
  `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -2856,7 +2859,7 @@ class RunNow:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  sql_params: Optional[Dict[str, str]] = None
  """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -2998,8 +3001,9 @@ class RunParameters:
  be specified in conjunction with notebook_params. The JSON representation of this field (for
  example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
- information about job runs."""
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  job_parameters: Optional[Dict[str, str]] = None
  """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -3013,13 +3017,13 @@ class RunParameters:

  notebook_params cannot be specified in conjunction with jar_params.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  The JSON representation of this field (for example `{"notebook_params":{"name":"john
  doe","age":"35"}}`) cannot exceed 10,000 bytes.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
- [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
+ [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  pipeline_params: Optional[PipelineParams] = None

@@ -3033,7 +3037,7 @@ class RunParameters:
  `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
  of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -3041,7 +3045,7 @@ class RunParameters:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  spark_submit_params: Optional[List[str]] = None
  """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
@@ -3050,7 +3054,7 @@ class RunParameters:
  parameters specified in job setting. The JSON representation of this field (for example
  `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -3058,7 +3062,7 @@ class RunParameters:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  sql_params: Optional[Dict[str, str]] = None
  """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
@@ -3405,9 +3409,9 @@ class SparkJarTask:
  parameters: Optional[List[str]] = None
  """Parameters passed to the main method.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  def as_dict(self) -> dict:
  """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
@@ -3436,9 +3440,9 @@ class SparkPythonTask:
  parameters: Optional[List[str]] = None
  """Command line parameters passed to the Python file.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  source: Optional[Source] = None
  """Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file
@@ -3470,9 +3474,9 @@ class SparkSubmitTask:
  parameters: Optional[List[str]] = None
  """Command-line parameters passed to spark submit.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html"""

  def as_dict(self) -> dict:
  """Serializes the SparkSubmitTask into a dictionary suitable for use as a JSON request body."""
@@ -4137,6 +4141,44 @@ class SubmitTask:
  webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))


+ @dataclass
+ class TableTriggerConfiguration:
+ condition: Optional[Condition] = None
+ """The table(s) condition based on which to trigger a job run."""
+
+ min_time_between_triggers_seconds: Optional[int] = None
+ """If set, the trigger starts a run only after the specified amount of time has passed since the
+ last time the trigger fired. The minimum allowed value is 60 seconds."""
+
+ table_names: Optional[List[str]] = None
+ """A list of Delta tables to monitor for changes. The table name must be in the format
+ `catalog_name.schema_name.table_name`."""
+
+ wait_after_last_change_seconds: Optional[int] = None
+ """If set, the trigger starts a run only after no table updates have occurred for the specified
+ time and can be used to wait for a series of table updates before triggering a run. The minimum
+ allowed value is 60 seconds."""
+
+ def as_dict(self) -> dict:
+ """Serializes the TableTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.condition is not None: body['condition'] = self.condition.value
+ if self.min_time_between_triggers_seconds is not None:
+ body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
+ if self.table_names: body['table_names'] = [v for v in self.table_names]
+ if self.wait_after_last_change_seconds is not None:
+ body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, any]) -> TableTriggerConfiguration:
+ """Deserializes the TableTriggerConfiguration from a dictionary."""
+ return cls(condition=_enum(d, 'condition', Condition),
+ min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None),
+ table_names=d.get('table_names', None),
+ wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
+
+
  @dataclass
  class Task:
  task_key: str
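A quick round-trip of the new dataclass, using only the fields shown above; the table name is hypothetical:

from databricks.sdk.service import jobs

cfg = jobs.TableTriggerConfiguration(
    condition=jobs.Condition.ANY_UPDATED,
    min_time_between_triggers_seconds=600,
    table_names=['main.analytics.orders'],  # hypothetical table
)
body = cfg.as_dict()
# {'condition': 'ANY_UPDATED', 'min_time_between_triggers_seconds': 600,
#  'table_names': ['main.analytics.orders']}
assert jobs.TableTriggerConfiguration.from_dict(body) == cfg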
@@ -4434,62 +4476,6 @@ class TaskNotificationSettings:
  no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))


- @dataclass
- class TriggerEvaluation:
- description: Optional[str] = None
- """Human-readable description of the the trigger evaluation result. Explains why the trigger
- evaluation triggered or did not trigger a run, or failed."""
-
- run_id: Optional[int] = None
- """The ID of the run that was triggered by the trigger evaluation. Only returned if a run was
- triggered."""
-
- timestamp: Optional[int] = None
- """Timestamp at which the trigger was evaluated."""
-
- def as_dict(self) -> dict:
- """Serializes the TriggerEvaluation into a dictionary suitable for use as a JSON request body."""
- body = {}
- if self.description is not None: body['description'] = self.description
- if self.run_id is not None: body['run_id'] = self.run_id
- if self.timestamp is not None: body['timestamp'] = self.timestamp
- return body
-
- @classmethod
- def from_dict(cls, d: Dict[str, any]) -> TriggerEvaluation:
- """Deserializes the TriggerEvaluation from a dictionary."""
- return cls(description=d.get('description', None),
- run_id=d.get('run_id', None),
- timestamp=d.get('timestamp', None))
-
-
- @dataclass
- class TriggerHistory:
- last_failed: Optional[TriggerEvaluation] = None
- """The last time the trigger failed to evaluate."""
-
- last_not_triggered: Optional[TriggerEvaluation] = None
- """The last time the trigger was evaluated but did not trigger a run."""
-
- last_triggered: Optional[TriggerEvaluation] = None
- """The last time the run was triggered due to a file arrival."""
-
- def as_dict(self) -> dict:
- """Serializes the TriggerHistory into a dictionary suitable for use as a JSON request body."""
- body = {}
- if self.last_failed: body['last_failed'] = self.last_failed.as_dict()
- if self.last_not_triggered: body['last_not_triggered'] = self.last_not_triggered.as_dict()
- if self.last_triggered: body['last_triggered'] = self.last_triggered.as_dict()
- return body
-
- @classmethod
- def from_dict(cls, d: Dict[str, any]) -> TriggerHistory:
- """Deserializes the TriggerHistory from a dictionary."""
- return cls(last_failed=_from_dict(d, 'last_failed', TriggerEvaluation),
- last_not_triggered=_from_dict(d, 'last_not_triggered', TriggerEvaluation),
- last_triggered=_from_dict(d, 'last_triggered', TriggerEvaluation))
-
-
  @dataclass
  class TriggerInfo:
  run_id: Optional[int] = None
@@ -4515,18 +4501,23 @@ class TriggerSettings:
  pause_status: Optional[PauseStatus] = None
  """Whether this trigger is paused or not."""

+ table: Optional[TableTriggerConfiguration] = None
+ """Table trigger settings."""
+
  def as_dict(self) -> dict:
  """Serializes the TriggerSettings into a dictionary suitable for use as a JSON request body."""
  body = {}
  if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
  if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+ if self.table: body['table'] = self.table.as_dict()
  return body

  @classmethod
  def from_dict(cls, d: Dict[str, any]) -> TriggerSettings:
  """Deserializes the TriggerSettings from a dictionary."""
  return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration),
- pause_status=_enum(d, 'pause_status', PauseStatus))
+ pause_status=_enum(d, 'pause_status', PauseStatus),
+ table=_from_dict(d, 'table', TableTriggerConfiguration))


  class TriggerType(Enum):
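With the new field wired into serialization, a `TriggerSettings` payload can now carry `table` next to `file_arrival` and `pause_status`. A sketch, with a hypothetical table name:

from databricks.sdk.service import jobs

settings = jobs.TriggerSettings(
    pause_status=jobs.PauseStatus.UNPAUSED,
    table=jobs.TableTriggerConfiguration(
        condition=jobs.Condition.ALL_UPDATED,
        table_names=['main.analytics.orders'],  # hypothetical
        wait_after_last_change_seconds=120,
    ),
)
print(settings.as_dict())
# {'pause_status': 'UNPAUSED', 'table': {'condition': 'ALL_UPDATED',
#  'table_names': ['main.analytics.orders'], 'wait_after_last_change_seconds': 120}}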
@@ -4536,15 +4527,16 @@ class TriggerType(Enum):
  One time triggers that fire a single run. This occurs you triggered a single run on demand
  through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
  previously failed run. This occurs when you request to re-run the job in case of failures. *
- `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task.
-
- * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival."""
+ `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
+ Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
+ triggered by a table update."""

  FILE_ARRIVAL = 'FILE_ARRIVAL'
  ONE_TIME = 'ONE_TIME'
  PERIODIC = 'PERIODIC'
  RETRY = 'RETRY'
  RUN_JOB_TASK = 'RUN_JOB_TASK'
+ TABLE = 'TABLE'


  @dataclass
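Code that classifies runs by trigger can now see the `TABLE` member; a hedged sketch:

from databricks.sdk.service import jobs

def describe_trigger(run: jobs.Run) -> str:
    # run.trigger is a TriggerType member, or None for some runs
    if run.trigger == jobs.TriggerType.TABLE:
        return 'triggered by a table update'
    if run.trigger == jobs.TriggerType.FILE_ARRIVAL:
        return 'triggered by a file arrival'
    return 'triggered some other way'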
@@ -4771,6 +4763,7 @@ class JobsAPI:
  if all_queued_runs is not None: body['all_queued_runs'] = all_queued_runs
  if job_id is not None: body['job_id'] = job_id
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  self._api.do('POST', '/api/2.1/jobs/runs/cancel-all', body=body, headers=headers)

  def cancel_run(self, run_id: int) -> Wait[Run]:
@@ -4789,6 +4782,7 @@ class JobsAPI:
  body = {}
  if run_id is not None: body['run_id'] = run_id
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  self._api.do('POST', '/api/2.1/jobs/runs/cancel', body=body, headers=headers)
  return Wait(self.wait_get_run_job_terminated_or_skipped, run_id=run_id)

@@ -4901,9 +4895,9 @@ class JobsAPI:
  :param timeout_seconds: int (optional)
  An optional timeout applied to each run of this job. A value of `0` means no timeout.
  :param trigger: :class:`TriggerSettings` (optional)
- Trigger settings for the job. Can be used to trigger a run when new files arrive in an external
- location. The default behavior is that the job runs only when triggered by clicking “Run Now” in
- the Jobs UI or sending an API request to `runNow`.
+ A configuration to trigger a run when certain conditions are met. The default behavior is that the
+ job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
+ `runNow`.
  :param webhook_notifications: :class:`WebhookNotifications` (optional)
  A collection of system notification IDs to notify when runs of this job begin or complete.
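Putting the pieces together against `JobsAPI.create`; a sketch only, with hypothetical names and cluster configuration omitted for brevity:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
created = w.jobs.create(
    name='orders-refresh',  # hypothetical
    tasks=[jobs.Task(task_key='refresh',
                     notebook_task=jobs.NotebookTask(notebook_path='/jobs/refresh'))],
    trigger=jobs.TriggerSettings(
        table=jobs.TableTriggerConfiguration(
            condition=jobs.Condition.ANY_UPDATED,
            table_names=['main.analytics.orders'])),
)
print(created.job_id)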

@@ -4935,6 +4929,7 @@ class JobsAPI:
  if trigger is not None: body['trigger'] = trigger.as_dict()
  if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  res = self._api.do('POST', '/api/2.1/jobs/create', body=body, headers=headers)
  return CreateResponse.from_dict(res)

@@ -4951,6 +4946,7 @@ class JobsAPI:
  body = {}
  if job_id is not None: body['job_id'] = job_id
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  self._api.do('POST', '/api/2.1/jobs/delete', body=body, headers=headers)

  def delete_run(self, run_id: int):
@@ -4966,6 +4962,7 @@ class JobsAPI:
  body = {}
  if run_id is not None: body['run_id'] = run_id
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  self._api.do('POST', '/api/2.1/jobs/runs/delete', body=body, headers=headers)

  def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput:
@@ -4985,6 +4982,7 @@ class JobsAPI:
  if run_id is not None: query['run_id'] = run_id
  if views_to_export is not None: query['views_to_export'] = views_to_export.value
  headers = {'Accept': 'application/json', }
+
  res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
  return ExportRunOutput.from_dict(res)

@@ -5002,6 +5000,7 @@ class JobsAPI:
  query = {}
  if job_id is not None: query['job_id'] = job_id
  headers = {'Accept': 'application/json', }
+
  res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
  return Job.from_dict(res)

@@ -5017,6 +5016,7 @@ class JobsAPI:
  """

  headers = {'Accept': 'application/json', }
+
  res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}/permissionLevels', headers=headers)
  return GetJobPermissionLevelsResponse.from_dict(res)

@@ -5032,6 +5032,7 @@ class JobsAPI:
  """

  headers = {'Accept': 'application/json', }
+
  res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}', headers=headers)
  return JobPermissions.from_dict(res)

@@ -5059,6 +5060,7 @@ class JobsAPI:
  if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values
  if run_id is not None: query['run_id'] = run_id
  headers = {'Accept': 'application/json', }
+
  res = self._api.do('GET', '/api/2.1/jobs/runs/get', query=query, headers=headers)
  return Run.from_dict(res)

@@ -5083,6 +5085,7 @@ class JobsAPI:
  query = {}
  if run_id is not None: query['run_id'] = run_id
  headers = {'Accept': 'application/json', }
+
  res = self._api.do('GET', '/api/2.1/jobs/runs/get-output', query=query, headers=headers)
  return RunOutput.from_dict(res)

@@ -5236,8 +5239,9 @@ class JobsAPI:
  in conjunction with notebook_params. The JSON representation of this field (for example
  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
- information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param job_parameters: Dict[str,str] (optional)
  Job-level parameters used in the run. for example `"param": "overriding_val"`
  :param latest_repair_id: int (optional)
@@ -5252,13 +5256,13 @@ class JobsAPI:

  notebook_params cannot be specified in conjunction with jar_params.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  The JSON representation of this field (for example `{"notebook_params":{"name":"john
  doe","age":"35"}}`) cannot exceed 10,000 bytes.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param pipeline_params: :class:`PipelineParams` (optional)
  :param python_named_params: Dict[str,str] (optional)
  A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
@@ -5269,7 +5273,7 @@ class JobsAPI:
  would overwrite the parameters specified in job setting. The JSON representation of this field (for
  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -5277,7 +5281,7 @@ class JobsAPI:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param rerun_all_failed_tasks: bool (optional)
  If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
  :param rerun_dependent_tasks: bool (optional)
@@ -5292,7 +5296,7 @@ class JobsAPI:
  in job setting. The JSON representation of this field (for example `{"python_params":["john
  doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -5300,7 +5304,7 @@ class JobsAPI:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param sql_params: Dict[str,str] (optional)
  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
  "age": "35"}`. The SQL alert task does not support custom parameters.
@@ -5325,6 +5329,7 @@ class JobsAPI:
  if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params]
  if sql_params is not None: body['sql_params'] = sql_params
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  op_response = self._api.do('POST', '/api/2.1/jobs/runs/repair', body=body, headers=headers)
  return Wait(self.wait_get_run_job_terminated_or_skipped,
  response=RepairRunResponse.from_dict(op_response),
@@ -5383,6 +5388,7 @@ class JobsAPI:
  if job_id is not None: body['job_id'] = job_id
  if new_settings is not None: body['new_settings'] = new_settings.as_dict()
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  self._api.do('POST', '/api/2.1/jobs/reset', body=body, headers=headers)

  def run_now(self,
@@ -5428,8 +5434,9 @@ class JobsAPI:
  in conjunction with notebook_params. The JSON representation of this field (for example
  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
- information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.
+
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param job_parameters: Dict[str,str] (optional)
  Job-level parameters used in the run. for example `"param": "overriding_val"`
  :param notebook_params: Dict[str,str] (optional)
@@ -5441,13 +5448,13 @@ class JobsAPI:

  notebook_params cannot be specified in conjunction with jar_params.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  The JSON representation of this field (for example `{"notebook_params":{"name":"john
  doe","age":"35"}}`) cannot exceed 10,000 bytes.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param pipeline_params: :class:`PipelineParams` (optional)
  :param python_named_params: Dict[str,str] (optional)
  A map from keys to values for jobs with Python wheel task, for example `"python_named_params":
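A matching `run_now` sketch showing a parameter variable flowing into `notebook_params`; the job ID is hypothetical:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# run_now returns a Wait; .result() blocks until the run reaches a
# terminal or skipped state.
run = w.jobs.run_now(
    job_id=123456789,  # hypothetical
    notebook_params={'context_job_id': '{{job.id}}'},
).result()
print(run.state.result_state)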
@@ -5458,7 +5465,7 @@ class JobsAPI:
  would overwrite the parameters specified in job setting. The JSON representation of this field (for
  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs.
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -5466,7 +5473,7 @@ class JobsAPI:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param queue: :class:`QueueSettings` (optional)
  The queue settings of the run.
  :param spark_submit_params: List[str] (optional)
@@ -5476,7 +5483,7 @@ class JobsAPI:
  in job setting. The JSON representation of this field (for example `{"python_params":["john
  doe","35"]}`) cannot exceed 10,000 bytes.

- Use [Task parameter variables] to set parameters containing information about job runs
+ Use [task parameter variables] such as `{{job.id}}` to pass context about job runs.

  Important

@@ -5484,7 +5491,7 @@ class JobsAPI:
  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
  emojis.

- [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+ [task parameter variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html
  :param sql_params: Dict[str,str] (optional)
  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
  "age": "35"}`. The SQL alert task does not support custom parameters.
@@ -5507,6 +5514,7 @@ class JobsAPI:
  if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params]
  if sql_params is not None: body['sql_params'] = sql_params
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  op_response = self._api.do('POST', '/api/2.1/jobs/run-now', body=body, headers=headers)
  return Wait(self.wait_get_run_job_terminated_or_skipped,
  response=RunNowResponse.from_dict(op_response),
@@ -5559,6 +5567,7 @@ class JobsAPI:
  if access_control_list is not None:
  body['access_control_list'] = [v.as_dict() for v in access_control_list]
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  res = self._api.do('PUT', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
  return JobPermissions.from_dict(res)

@@ -5640,6 +5649,7 @@ class JobsAPI:
  if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
  if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  op_response = self._api.do('POST', '/api/2.1/jobs/runs/submit', body=body, headers=headers)
  return Wait(self.wait_get_run_job_terminated_or_skipped,
  response=SubmitRunResponse.from_dict(op_response),
@@ -5706,6 +5716,7 @@ class JobsAPI:
  if job_id is not None: body['job_id'] = job_id
  if new_settings is not None: body['new_settings'] = new_settings.as_dict()
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  self._api.do('POST', '/api/2.1/jobs/update', body=body, headers=headers)

  def update_permissions(
@@ -5727,5 +5738,6 @@ class JobsAPI:
  if access_control_list is not None:
  body['access_control_list'] = [v.as_dict() for v in access_control_list]
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
  res = self._api.do('PATCH', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
  return JobPermissions.from_dict(res)