databricks-sdk 0.28.0__py3-none-any.whl → 0.29.0__py3-none-any.whl

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

@@ -940,17 +940,23 @@ class ForEachTaskErrorMessageStats:
     error_message: Optional[str] = None
     """Describes the error message occured during the iterations."""
 
+    termination_category: Optional[str] = None
+    """Describes the termination reason for the error message."""
+
     def as_dict(self) -> dict:
         """Serializes the ForEachTaskErrorMessageStats into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.count is not None: body['count'] = self.count
         if self.error_message is not None: body['error_message'] = self.error_message
+        if self.termination_category is not None: body['termination_category'] = self.termination_category
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats:
         """Deserializes the ForEachTaskErrorMessageStats from a dictionary."""
-        return cls(count=d.get('count', None), error_message=d.get('error_message', None))
+        return cls(count=d.get('count', None),
+                   error_message=d.get('error_message', None),
+                   termination_category=d.get('termination_category', None))
 
 
 @dataclass
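
The new `termination_category` field rides through the existing as_dict/from_dict helpers like any other optional string. A minimal round-trip sketch against the code above (the literal values are made up for illustration):

    from databricks.sdk.service.jobs import ForEachTaskErrorMessageStats

    # Deserialize a payload that now carries termination_category, then serialize it back.
    stats = ForEachTaskErrorMessageStats.from_dict({
        'count': 3,
        'error_message': 'Task failed',
        'termination_category': 'USER_ERROR',
    })
    assert stats.as_dict()['termination_category'] == 'USER_ERROR'
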
@@ -1315,6 +1321,13 @@ class JobEmailNotifications:
     """A list of email addresses to be notified when a run begins. If not specified on job creation,
     reset, or update, the list is empty, and notifications are not sent."""
 
+    on_streaming_backlog_exceeded: Optional[List[str]] = None
+    """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any
+    stream. Streaming backlog thresholds can be set in the `health` field using the following
+    metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or
+    `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the
+    issue persists, notifications are resent every 30 minutes."""
+
     on_success: Optional[List[str]] = None
     """A list of email addresses to be notified when a run successfully completes. A run is considered
     to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS`
@@ -1332,6 +1345,8 @@ class JobEmailNotifications:
         ]
         if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
         if self.on_start: body['on_start'] = [v for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
@@ -1343,6 +1358,7 @@ class JobEmailNotifications:
                                                                 None),
                    on_failure=d.get('on_failure', None),
                    on_start=d.get('on_start', None),
+                   on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None),
                    on_success=d.get('on_success', None))
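
The added recipient list is just another optional field on the notification dataclass and is intended to be combined with the streaming backlog health metrics introduced further down. A small sketch (the address is a placeholder):

    from databricks.sdk.service.jobs import JobEmailNotifications

    notifications = JobEmailNotifications(
        on_failure=['data-team@example.com'],
        on_streaming_backlog_exceeded=['data-team@example.com'])
    # as_dict() now emits the new key alongside the existing ones.
    print(notifications.as_dict())
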
@@ -1352,9 +1368,8 @@ class JobEnvironment:
     """The key of an environment. It has to be unique within a job."""
 
     spec: Optional[compute.Environment] = None
-    """The a environment entity used to preserve serverless environment side panel and jobs'
-    environment for non-notebook task. In this minimal environment spec, only pip dependencies are
-    supported. Next ID: 5"""
+    """The environment entity used to preserve serverless environment side panel and jobs' environment
+    for non-notebook task. In this minimal environment spec, only pip dependencies are supported."""
 
     def as_dict(self) -> dict:
         """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body."""
@@ -1783,9 +1798,21 @@ class JobSourceDirtyState(Enum):
 
 
 class JobsHealthMetric(Enum):
-    """Specifies the health metric that is being evaluated for a particular health rule."""
+    """Specifies the health metric that is being evaluated for a particular health rule.
+
+    * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
+    An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
+    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
+    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
+    streams. This metric is in Private Preview."""
 
     RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS'
+    STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES'
+    STREAMING_BACKLOG_FILES = 'STREAMING_BACKLOG_FILES'
+    STREAMING_BACKLOG_RECORDS = 'STREAMING_BACKLOG_RECORDS'
+    STREAMING_BACKLOG_SECONDS = 'STREAMING_BACKLOG_SECONDS'
 
 
 class JobsHealthOperator(Enum):
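
The new backlog metrics are meant to be referenced from a job's `health` rules. A sketch of one rule; the `value` threshold field of JobsHealthRule is not shown in this hunk, and 600 seconds of consumer delay is an arbitrary example:

    from databricks.sdk.service.jobs import (JobsHealthMetric, JobsHealthOperator, JobsHealthRule,
                                             JobsHealthRules)

    health = JobsHealthRules(rules=[
        JobsHealthRule(metric=JobsHealthMetric.STREAMING_BACKLOG_SECONDS,
                       op=JobsHealthOperator.GREATER_THAN,
                       value=600)
    ])
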
@@ -1797,7 +1824,15 @@ class JobsHealthOperator(Enum):
 @dataclass
 class JobsHealthRule:
     metric: JobsHealthMetric
-    """Specifies the health metric that is being evaluated for a particular health rule."""
+    """Specifies the health metric that is being evaluated for a particular health rule.
+
+    * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
+    An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
+    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
+    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
+    streams. This metric is in Private Preview."""
 
     op: JobsHealthOperator
     """Specifies the operator used to compare the health metric value with the specified threshold."""
@@ -1994,6 +2029,36 @@ class PauseStatus(Enum):
     UNPAUSED = 'UNPAUSED'
 
 
+@dataclass
+class PeriodicTriggerConfiguration:
+    interval: int
+    """The interval at which the trigger should run."""
+
+    unit: PeriodicTriggerConfigurationTimeUnit
+    """The unit of time for the interval."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PeriodicTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PeriodicTriggerConfiguration:
+        """Deserializes the PeriodicTriggerConfiguration from a dictionary."""
+        return cls(interval=d.get('interval', None),
+                   unit=_enum(d, 'unit', PeriodicTriggerConfigurationTimeUnit))
+
+
+class PeriodicTriggerConfigurationTimeUnit(Enum):
+
+    DAYS = 'DAYS'
+    HOURS = 'HOURS'
+    TIME_UNIT_UNSPECIFIED = 'TIME_UNIT_UNSPECIFIED'
+    WEEKS = 'WEEKS'
+
+
 @dataclass
 class PipelineParams:
     full_refresh: Optional[bool] = None
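
The serialization of the new trigger configuration can be checked directly against the generated as_dict above; a minimal sketch:

    from databricks.sdk.service.jobs import (PeriodicTriggerConfiguration,
                                             PeriodicTriggerConfigurationTimeUnit)

    periodic = PeriodicTriggerConfiguration(interval=4,
                                            unit=PeriodicTriggerConfigurationTimeUnit.HOURS)
    assert periodic.as_dict() == {'interval': 4, 'unit': 'HOURS'}
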
@@ -2448,7 +2513,6 @@ class ResolvedStringParamsValues:
 
 @dataclass
 class ResolvedValues:
-
     condition_task: Optional[ResolvedConditionTaskValues] = None
 
     dbt_task: Optional[ResolvedDbtTaskValues] = None
@@ -3391,6 +3455,10 @@ class RunTask:
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
 
+    environment_key: Optional[str] = None
+    """The key that references an environment spec in a job. This field is required for Python script,
+    Python wheel and dbt tasks when using serverless compute."""
+
     execution_duration: Optional[int] = None
     """The time in milliseconds it took to execute the commands in the JAR or notebook until they
     completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration
@@ -3522,6 +3590,7 @@ class RunTask:
         if self.description is not None: body['description'] = self.description
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.end_time is not None: body['end_time'] = self.end_time
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
@@ -3564,6 +3633,7 @@ class RunTask:
                    description=d.get('description', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    end_time=d.get('end_time', None),
+                   environment_key=d.get('environment_key', None),
                    execution_duration=d.get('execution_duration', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', RunForEachTask),
@@ -4121,18 +4191,12 @@ class SubmitRun:
     access_control_list: Optional[List[iam.AccessControlRequest]] = None
     """List of permissions to set on the job."""
 
-    condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
-
-    dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
-
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""
 
+    environments: Optional[List[JobEnvironment]] = None
+    """A list of task execution environment specifications that can be referenced by tasks of this run."""
+
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
     Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -4160,20 +4224,10 @@ class SubmitRun:
 
     [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html"""
 
-    notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
-
     notification_settings: Optional[JobNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this run."""
 
-    pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
-
-    python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
-
     queue: Optional[QueueSettings] = None
     """The queue settings of the one-time run."""
 
@@ -4181,38 +4235,9 @@ class SubmitRun:
     """Specifies the user or service principal that the job runs as. If not specified, the job runs as
     the user who submits the request."""
 
-    run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
-
     run_name: Optional[str] = None
     """An optional name for the run. The default value is `Untitled`."""
 
-    spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
-
-    spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
-
-    spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
-
-    sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
-
     tasks: Optional[List[SubmitTask]] = None
 
     timeout_seconds: Optional[int] = None
@@ -4226,24 +4251,15 @@ class SubmitRun:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
-        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.git_source: body['git_source'] = self.git_source.as_dict()
         if self.health: body['health'] = self.health.as_dict()
         if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
-        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
         if self.queue: body['queue'] = self.queue.as_dict()
         if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
         if self.run_name is not None: body['run_name'] = self.run_name
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
-        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
@@ -4253,24 +4269,15 @@ class SubmitRun:
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest),
-                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
-                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
+                   environments=_repeated_dict(d, 'environments', JobEnvironment),
                    git_source=_from_dict(d, 'git_source', GitSource),
                    health=_from_dict(d, 'health', JobsHealthRules),
                    idempotency_token=d.get('idempotency_token', None),
-                   notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
-                   pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
-                   python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
                    queue=_from_dict(d, 'queue', QueueSettings),
                    run_as=_from_dict(d, 'run_as', JobRunAs),
-                   run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
                    run_name=d.get('run_name', None),
-                   spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
-                   spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
-                   spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask),
-                   sql_task=_from_dict(d, 'sql_task', SqlTask),
                    tasks=_repeated_dict(d, 'tasks', SubmitTask),
                    timeout_seconds=d.get('timeout_seconds', None),
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
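
The practical effect of this block of removals is that SubmitRun no longer carries a single top-level task; every task, including what used to be a bare notebook_task, dbt_task or spark_*_task, is wrapped in a SubmitTask and passed through `tasks`. A sketch of the equivalent call shape (the path and task key are placeholders):

    from databricks.sdk.service.jobs import NotebookTask, SubmitTask

    tasks = [SubmitTask(task_key='example_notebook',
                        notebook_task=NotebookTask(notebook_path='/Workspace/Users/me/example'))]
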
@@ -4307,6 +4314,10 @@ class SubmitTask:
     execution of other tasks. Does not require a cluster to execute and does not support retries or
     notifications."""
 
+    dbt_task: Optional[DbtTask] = None
+    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
+    the ability to use a serverless or a pro SQL warehouse."""
+
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
     this field must complete successfully before executing this task. The key is `task_key`, and the
@@ -4319,6 +4330,10 @@ class SubmitTask:
     """An optional set of email addresses notified when the task run begins or completes. The default
     behavior is to not send any emails."""
 
+    environment_key: Optional[str] = None
+    """The key that references an environment spec in a job. This field is required for Python script,
+    Python wheel and dbt tasks when using serverless compute."""
+
     existing_cluster_id: Optional[str] = None
     """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
     jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
@@ -4397,9 +4412,11 @@ class SubmitTask:
         """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
+        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
         if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.health: body['health'] = self.health.as_dict()
@@ -4424,9 +4441,11 @@ class SubmitTask:
     def from_dict(cls, d: Dict[str, any]) -> SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
         return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
+                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
+                   environment_key=d.get('environment_key', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
                    for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
                    health=_from_dict(d, 'health', JobsHealthRules),
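
For serverless tasks, the new environment_key on SubmitTask pairs with the environments list added to SubmitRun and with JobEnvironment's compute.Environment spec. A sketch, assuming compute.Environment exposes client and dependencies fields as in recent SDK releases (keys, package names and entry points are placeholders):

    from databricks.sdk.service import compute, jobs

    environments = [
        jobs.JobEnvironment(environment_key='default',
                            spec=compute.Environment(client='1', dependencies=['my-wheel==0.1.0']))
    ]
    task = jobs.SubmitTask(task_key='wheel_task',
                           environment_key='default',
                           python_wheel_task=jobs.PythonWheelTask(package_name='my_wheel',
                                                                  entry_point='main'))
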
@@ -4729,6 +4748,13 @@ class TaskEmailNotifications:
     """A list of email addresses to be notified when a run begins. If not specified on job creation,
     reset, or update, the list is empty, and notifications are not sent."""
 
+    on_streaming_backlog_exceeded: Optional[List[str]] = None
+    """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any
+    stream. Streaming backlog thresholds can be set in the `health` field using the following
+    metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or
+    `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the
+    issue persists, notifications are resent every 30 minutes."""
+
     on_success: Optional[List[str]] = None
     """A list of email addresses to be notified when a run successfully completes. A run is considered
     to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS`
@@ -4746,6 +4772,8 @@ class TaskEmailNotifications:
         ]
         if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
         if self.on_start: body['on_start'] = [v for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
@@ -4757,6 +4785,7 @@ class TaskEmailNotifications:
                                                                 None),
                    on_failure=d.get('on_failure', None),
                    on_start=d.get('on_start', None),
+                   on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None),
                    on_success=d.get('on_success', None))
 
 
@@ -4820,6 +4849,9 @@ class TriggerSettings:
     pause_status: Optional[PauseStatus] = None
     """Whether this trigger is paused or not."""
 
+    periodic: Optional[PeriodicTriggerConfiguration] = None
+    """Periodic trigger settings."""
+
     table: Optional[TableUpdateTriggerConfiguration] = None
     """Old table trigger settings name. Deprecated in favor of `table_update`."""
 
@@ -4830,6 +4862,7 @@ class TriggerSettings:
         body = {}
         if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.periodic: body['periodic'] = self.periodic.as_dict()
         if self.table: body['table'] = self.table.as_dict()
         if self.table_update: body['table_update'] = self.table_update.as_dict()
         return body
@@ -4839,6 +4872,7 @@ class TriggerSettings:
         """Deserializes the TriggerSettings from a dictionary."""
         return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration),
                    pause_status=_enum(d, 'pause_status', PauseStatus),
+                   periodic=_from_dict(d, 'periodic', PeriodicTriggerConfiguration),
                    table=_from_dict(d, 'table', TableUpdateTriggerConfiguration),
                    table_update=_from_dict(d, 'table_update', TableUpdateTriggerConfiguration))
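
A periodic trigger is attached through the same TriggerSettings object that already carries the file-arrival and table-update triggers, typically on a job's trigger settings. A minimal sketch:

    from databricks.sdk.service.jobs import (PauseStatus, PeriodicTriggerConfiguration,
                                             PeriodicTriggerConfigurationTimeUnit, TriggerSettings)

    trigger = TriggerSettings(pause_status=PauseStatus.UNPAUSED,
                              periodic=PeriodicTriggerConfiguration(
                                  interval=1, unit=PeriodicTriggerConfigurationTimeUnit.DAYS))
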
@@ -4986,6 +5020,14 @@ class WebhookNotifications:
     """An optional list of system notification IDs to call when the run starts. A maximum of 3
     destinations can be specified for the `on_start` property."""
 
+    on_streaming_backlog_exceeded: Optional[List[Webhook]] = None
+    """An optional list of system notification IDs to call when any streaming backlog thresholds are
+    exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the
+    following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`,
+    `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute
+    average of these metrics. If the issue persists, notifications are resent every 30 minutes. A
+    maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property."""
+
     on_success: Optional[List[Webhook]] = None
     """An optional list of system notification IDs to call when the run completes successfully. A
     maximum of 3 destinations can be specified for the `on_success` property."""
@@ -4999,6 +5041,8 @@ class WebhookNotifications:
         ]
         if self.on_failure: body['on_failure'] = [v.as_dict() for v in self.on_failure]
         if self.on_start: body['on_start'] = [v.as_dict() for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = [v.as_dict() for v in self.on_streaming_backlog_exceeded]
         if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success]
         return body
 
@@ -5009,6 +5053,7 @@ class WebhookNotifications:
             d, 'on_duration_warning_threshold_exceeded', Webhook),
                    on_failure=_repeated_dict(d, 'on_failure', Webhook),
                    on_start=_repeated_dict(d, 'on_start', Webhook),
+                   on_streaming_backlog_exceeded=_repeated_dict(d, 'on_streaming_backlog_exceeded', Webhook),
                    on_success=_repeated_dict(d, 'on_success', Webhook))
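
Webhook destinations get the same new hook; a sketch, where the ID stands in for an existing notification destination:

    from databricks.sdk.service.jobs import Webhook, WebhookNotifications

    webhooks = WebhookNotifications(
        on_streaming_backlog_exceeded=[Webhook(id='<notification-destination-id>')])
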
@@ -5883,24 +5928,15 @@ class JobsAPI:
     def submit(self,
                *,
                access_control_list: Optional[List[iam.AccessControlRequest]] = None,
-               condition_task: Optional[ConditionTask] = None,
-               dbt_task: Optional[DbtTask] = None,
                email_notifications: Optional[JobEmailNotifications] = None,
+               environments: Optional[List[JobEnvironment]] = None,
                git_source: Optional[GitSource] = None,
                health: Optional[JobsHealthRules] = None,
                idempotency_token: Optional[str] = None,
-               notebook_task: Optional[NotebookTask] = None,
                notification_settings: Optional[JobNotificationSettings] = None,
-               pipeline_task: Optional[PipelineTask] = None,
-               python_wheel_task: Optional[PythonWheelTask] = None,
                queue: Optional[QueueSettings] = None,
                run_as: Optional[JobRunAs] = None,
-               run_job_task: Optional[RunJobTask] = None,
                run_name: Optional[str] = None,
-               spark_jar_task: Optional[SparkJarTask] = None,
-               spark_python_task: Optional[SparkPythonTask] = None,
-               spark_submit_task: Optional[SparkSubmitTask] = None,
-               sql_task: Optional[SqlTask] = None,
                tasks: Optional[List[SubmitTask]] = None,
                timeout_seconds: Optional[int] = None,
                webhook_notifications: Optional[WebhookNotifications] = None) -> Wait[Run]:
@@ -5912,14 +5948,10 @@ class JobsAPI:
 
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
           List of permissions to set on the job.
-        :param condition_task: :class:`ConditionTask` (optional)
-          If condition_task, specifies a condition with an outcome that can be used to control the execution
-          of other tasks. Does not require a cluster to execute and does not support retries or notifications.
-        :param dbt_task: :class:`DbtTask` (optional)
-          If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and the
-          ability to use a serverless or a pro SQL warehouse.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
+        :param environments: List[:class:`JobEnvironment`] (optional)
+          A list of task execution environment specifications that can be referenced by tasks of this run.
         :param git_source: :class:`GitSource` (optional)
           An optional specification for a remote Git repository containing the source code used by tasks.
           Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -5944,47 +5976,16 @@ class JobsAPI:
           For more information, see [How to ensure idempotency for jobs].
 
           [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
-        :param notebook_task: :class:`NotebookTask` (optional)
-          If notebook_task, indicates that this task must run a notebook. This field may not be specified in
-          conjunction with spark_jar_task.
         :param notification_settings: :class:`JobNotificationSettings` (optional)
           Optional notification settings that are used when sending notifications to each of the
           `email_notifications` and `webhook_notifications` for this run.
-        :param pipeline_task: :class:`PipelineTask` (optional)
-          If pipeline_task, indicates that this task must execute a Pipeline.
-        :param python_wheel_task: :class:`PythonWheelTask` (optional)
-          If python_wheel_task, indicates that this job must execute a PythonWheel.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the one-time run.
         :param run_as: :class:`JobRunAs` (optional)
           Specifies the user or service principal that the job runs as. If not specified, the job runs as the
           user who submits the request.
-        :param run_job_task: :class:`RunJobTask` (optional)
-          If run_job_task, indicates that this task must execute another job.
         :param run_name: str (optional)
           An optional name for the run. The default value is `Untitled`.
-        :param spark_jar_task: :class:`SparkJarTask` (optional)
-          If spark_jar_task, indicates that this task must run a JAR.
-        :param spark_python_task: :class:`SparkPythonTask` (optional)
-          If spark_python_task, indicates that this task must run a Python file.
-        :param spark_submit_task: :class:`SparkSubmitTask` (optional)
-          If `spark_submit_task`, indicates that this task must be launched by the spark submit script. This
-          task can run only on new clusters.
-
-          In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-          `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-          configurations.
-
-          `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-          _cannot_ specify them in parameters.
-
-          By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks
-          services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some
-          room for off-heap usage.
-
-          The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
-        :param sql_task: :class:`SqlTask` (optional)
-          If sql_task, indicates that this job must execute a SQL task.
         :param tasks: List[:class:`SubmitTask`] (optional)
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
@@ -5998,24 +5999,15 @@ class JobsAPI:
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        if condition_task is not None: body['condition_task'] = condition_task.as_dict()
-        if dbt_task is not None: body['dbt_task'] = dbt_task.as_dict()
         if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
+        if environments is not None: body['environments'] = [v.as_dict() for v in environments]
         if git_source is not None: body['git_source'] = git_source.as_dict()
         if health is not None: body['health'] = health.as_dict()
         if idempotency_token is not None: body['idempotency_token'] = idempotency_token
-        if notebook_task is not None: body['notebook_task'] = notebook_task.as_dict()
         if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
-        if pipeline_task is not None: body['pipeline_task'] = pipeline_task.as_dict()
-        if python_wheel_task is not None: body['python_wheel_task'] = python_wheel_task.as_dict()
         if queue is not None: body['queue'] = queue.as_dict()
         if run_as is not None: body['run_as'] = run_as.as_dict()
-        if run_job_task is not None: body['run_job_task'] = run_job_task.as_dict()
         if run_name is not None: body['run_name'] = run_name
-        if spark_jar_task is not None: body['spark_jar_task'] = spark_jar_task.as_dict()
-        if spark_python_task is not None: body['spark_python_task'] = spark_python_task.as_dict()
-        if spark_submit_task is not None: body['spark_submit_task'] = spark_submit_task.as_dict()
-        if sql_task is not None: body['sql_task'] = sql_task.as_dict()
         if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks]
         if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
         if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
@@ -6030,47 +6022,29 @@ class JobsAPI:
         self,
         *,
         access_control_list: Optional[List[iam.AccessControlRequest]] = None,
-        condition_task: Optional[ConditionTask] = None,
-        dbt_task: Optional[DbtTask] = None,
         email_notifications: Optional[JobEmailNotifications] = None,
+        environments: Optional[List[JobEnvironment]] = None,
         git_source: Optional[GitSource] = None,
         health: Optional[JobsHealthRules] = None,
         idempotency_token: Optional[str] = None,
-        notebook_task: Optional[NotebookTask] = None,
         notification_settings: Optional[JobNotificationSettings] = None,
-        pipeline_task: Optional[PipelineTask] = None,
-        python_wheel_task: Optional[PythonWheelTask] = None,
         queue: Optional[QueueSettings] = None,
         run_as: Optional[JobRunAs] = None,
-        run_job_task: Optional[RunJobTask] = None,
         run_name: Optional[str] = None,
-        spark_jar_task: Optional[SparkJarTask] = None,
-        spark_python_task: Optional[SparkPythonTask] = None,
-        spark_submit_task: Optional[SparkSubmitTask] = None,
-        sql_task: Optional[SqlTask] = None,
         tasks: Optional[List[SubmitTask]] = None,
         timeout_seconds: Optional[int] = None,
         webhook_notifications: Optional[WebhookNotifications] = None,
         timeout=timedelta(minutes=20)) -> Run:
         return self.submit(access_control_list=access_control_list,
-                           condition_task=condition_task,
-                           dbt_task=dbt_task,
                            email_notifications=email_notifications,
+                           environments=environments,
                            git_source=git_source,
                            health=health,
                            idempotency_token=idempotency_token,
-                           notebook_task=notebook_task,
                            notification_settings=notification_settings,
-                           pipeline_task=pipeline_task,
-                           python_wheel_task=python_wheel_task,
                            queue=queue,
                            run_as=run_as,
-                           run_job_task=run_job_task,
                            run_name=run_name,
-                           spark_jar_task=spark_jar_task,
-                           spark_python_task=spark_python_task,
-                           spark_submit_task=spark_submit_task,
-                           sql_task=sql_task,
                            tasks=tasks,
                            timeout_seconds=timeout_seconds,
                            webhook_notifications=webhook_notifications).result(timeout=timeout)
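
End to end, a one-time serverless run against the new submit surface looks roughly like the sketch below; workspace paths, keys and dependencies are placeholders, and submit returns a waiter whose result() blocks until the run finishes:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute, jobs

    w = WorkspaceClient()
    run = w.jobs.submit(
        run_name='serverless-one-time-run',
        environments=[
            jobs.JobEnvironment(environment_key='default',
                                spec=compute.Environment(client='1', dependencies=['requests']))
        ],
        tasks=[
            jobs.SubmitTask(task_key='script',
                            environment_key='default',
                            spark_python_task=jobs.SparkPythonTask(
                                python_file='/Workspace/Users/me/script.py'))
        ]).result()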