databricks-sdk 0.69.0__py3-none-any.whl → 0.71.0__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.

Potentially problematic release: this version of databricks-sdk might be problematic.
Files changed (37)
  1. databricks/sdk/__init__.py +24 -24
  2. databricks/sdk/dbutils.py +17 -0
  3. databricks/sdk/mixins/files.py +10 -10
  4. databricks/sdk/service/agentbricks.py +2 -0
  5. databricks/sdk/service/apps.py +10 -0
  6. databricks/sdk/service/billing.py +13 -3
  7. databricks/sdk/service/catalog.py +131 -47
  8. databricks/sdk/service/cleanrooms.py +11 -3
  9. databricks/sdk/service/compute.py +64 -0
  10. databricks/sdk/service/dashboards.py +10 -0
  11. databricks/sdk/service/database.py +12 -0
  12. databricks/sdk/service/dataquality.py +201 -52
  13. databricks/sdk/service/files.py +7 -72
  14. databricks/sdk/service/iam.py +26 -36
  15. databricks/sdk/service/iamv2.py +6 -0
  16. databricks/sdk/service/jobs.py +86 -154
  17. databricks/sdk/service/marketplace.py +18 -0
  18. databricks/sdk/service/ml.py +464 -13
  19. databricks/sdk/service/oauth2.py +37 -19
  20. databricks/sdk/service/pipelines.py +25 -2
  21. databricks/sdk/service/provisioning.py +19 -1
  22. databricks/sdk/service/qualitymonitorv2.py +2 -0
  23. databricks/sdk/service/serving.py +16 -21
  24. databricks/sdk/service/settings.py +45 -72
  25. databricks/sdk/service/settingsv2.py +2 -0
  26. databricks/sdk/service/sharing.py +23 -69
  27. databricks/sdk/service/sql.py +85 -62
  28. databricks/sdk/service/tags.py +2 -0
  29. databricks/sdk/service/vectorsearch.py +8 -0
  30. databricks/sdk/service/workspace.py +18 -91
  31. databricks/sdk/version.py +1 -1
  32. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/METADATA +1 -1
  33. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/RECORD +37 -37
  34. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/WHEEL +0 -0
  35. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/licenses/LICENSE +0 -0
  36. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/licenses/NOTICE +0 -0
  37. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/top_level.txt +0 -0
@@ -449,42 +449,6 @@ class BaseRun:
          )


- @dataclass
- class CancelAllRunsResponse:
-     def as_dict(self) -> dict:
-         """Serializes the CancelAllRunsResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse:
-         """Deserializes the CancelAllRunsResponse from a dictionary."""
-         return cls()
-
-
- @dataclass
- class CancelRunResponse:
-     def as_dict(self) -> dict:
-         """Serializes the CancelRunResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> CancelRunResponse:
-         """Deserializes the CancelRunResponse from a dictionary."""
-         return cls()
-
-
  class CleanRoomTaskRunLifeCycleState(Enum):
      """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to
      remove coupling with jobs API definition"""
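With the empty `CancelAllRunsResponse` and `CancelRunResponse` placeholders removed, cancel calls are used for their side effect (or for the run waiter they return) rather than for a response object. A minimal sketch of the updated call sites, assuming a configured `WorkspaceClient` and placeholder IDs:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # credentials resolved from the environment or ~/.databrickscfg

# cancel_all_runs no longer yields a response object; it either succeeds or raises.
w.jobs.cancel_all_runs(job_id=1234)  # 1234 is a placeholder job ID

# cancel_run still returns a long-running-operation waiter for the affected run.
run = w.jobs.cancel_run(run_id=5678).result()  # 5678 is a placeholder run ID
print(run.state)
```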
@@ -561,6 +525,9 @@ class CleanRoomTaskRunState:

  @dataclass
  class CleanRoomsNotebookTask:
+     """Clean Rooms notebook task for V1 Clean Room service (GA). Replaces the deprecated
+     CleanRoomNotebookTask (defined above) which was for V0 service."""
+
      clean_room_name: str
      """The clean room that the notebook belongs to."""

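For orientation, here is a hedged sketch of attaching the V1 task type to a job; `notebook_name` and the `clean_rooms_notebook_task` field on `jobs.Task` are assumed from the current generated API rather than shown in this hunk:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# V1 clean-room notebook task; the older CleanRoomNotebookTask (V0) is deprecated.
task = jobs.Task(
    task_key="clean_room_analysis",
    clean_rooms_notebook_task=jobs.CleanRoomsNotebookTask(
        clean_room_name="my_clean_room",   # clean room that owns the notebook (field shown above)
        notebook_name="shared/analysis",   # assumed field naming the notebook to run
    ),
)

job = w.jobs.create(name="clean-room-job", tasks=[task])
print(job.job_id)
```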
@@ -1512,42 +1479,6 @@ class DbtTask:
          )


- @dataclass
- class DeleteResponse:
-     def as_dict(self) -> dict:
-         """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse:
-         """Deserializes the DeleteResponse from a dictionary."""
-         return cls()
-
-
- @dataclass
- class DeleteRunResponse:
-     def as_dict(self) -> dict:
-         """Serializes the DeleteRunResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse:
-         """Deserializes the DeleteRunResponse from a dictionary."""
-         return cls()
-
-
  @dataclass
  class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
      """Represents a change to the job cluster's settings that would be required for the job clusters to
@@ -4209,24 +4140,6 @@ class RepairRunResponse:
          return cls(repair_id=d.get("repair_id", None))


- @dataclass
- class ResetResponse:
-     def as_dict(self) -> dict:
-         """Serializes the ResetResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the ResetResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> ResetResponse:
-         """Deserializes the ResetResponse from a dictionary."""
-         return cls()
-
-
  @dataclass
  class ResolvedConditionTaskValues:
      left: Optional[str] = None
@@ -5016,7 +4929,11 @@ class RunJobTask:

      dbt_commands: Optional[List[str]] = None
      """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-     deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
+     deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      jar_params: Optional[List[str]] = None
      """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
@@ -5025,9 +4942,9 @@ class RunJobTask:
      be specified in conjunction with notebook_params. The JSON representation of this field (for
      example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-     Use [Task parameter variables] to set parameters containing information about job runs.
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      job_parameters: Optional[Dict[str, str]] = None
      """Job-level parameters used to trigger the job."""
@@ -5041,13 +4958,13 @@ class RunJobTask:

      notebook_params cannot be specified in conjunction with jar_params.

-     Use [Task parameter variables] to set parameters containing information about job runs.
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

      The JSON representation of this field (for example `{"notebook_params":{"name":"john
      doe","age":"35"}}`) cannot exceed 10,000 bytes.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
+     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      pipeline_params: Optional[PipelineParams] = None
      """Controls whether the pipeline should perform a full refresh"""
@@ -5060,7 +4977,7 @@ class RunJobTask:
      `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
      of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-     Use [Task parameter variables] to set parameters containing information about job runs.
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

      Important

@@ -5068,7 +4985,7 @@ class RunJobTask:
      returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
      emojis.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      spark_submit_params: Optional[List[str]] = None
      """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
@@ -5077,7 +4994,7 @@ class RunJobTask:
      parameters specified in job setting. The JSON representation of this field (for example
      `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-     Use [Task parameter variables] to set parameters containing information about job runs
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

      Important

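Every parameter field on `RunJobTask` now carries the same deprecation note, so the intended call shape is job-level parameters rather than the task-type-specific `*_params` fields. A minimal sketch, assuming the triggered job reads its inputs as `{{job.parameters.<name>}}`:

```python
from databricks.sdk.service import jobs

# Preferred: job-level parameters flow down to every task in the triggered job.
run_job = jobs.RunJobTask(
    job_id=1234,  # placeholder ID of the job to trigger
    job_parameters={"name": "john doe", "age": "35"},
)

# Deprecated shape, kept only for contrast: per-task-type fields such as notebook_params.
legacy = jobs.RunJobTask(
    job_id=1234,
    notebook_params={"name": "john doe", "age": "35"},
)
```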
@@ -5085,11 +5002,15 @@ class RunJobTask:
      returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
      emojis.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      sql_params: Optional[Dict[str, str]] = None
      """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
-     doe", "age": "35"}`. The SQL alert task does not support custom parameters."""
+     doe", "age": "35"}`. The SQL alert task does not support custom parameters.
+
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      def as_dict(self) -> dict:
          """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body."""
@@ -5377,7 +5298,11 @@ class RunOutput:
  class RunParameters:
      dbt_commands: Optional[List[str]] = None
      """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-     deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
+     deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      jar_params: Optional[List[str]] = None
      """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
@@ -5386,9 +5311,9 @@ class RunParameters:
      be specified in conjunction with notebook_params. The JSON representation of this field (for
      example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-     Use [Task parameter variables] to set parameters containing information about job runs.
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      notebook_params: Optional[Dict[str, str]] = None
      """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -5399,13 +5324,13 @@ class RunParameters:

      notebook_params cannot be specified in conjunction with jar_params.

-     Use [Task parameter variables] to set parameters containing information about job runs.
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

      The JSON representation of this field (for example `{"notebook_params":{"name":"john
      doe","age":"35"}}`) cannot exceed 10,000 bytes.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
+     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      pipeline_params: Optional[PipelineParams] = None
      """Controls whether the pipeline should perform a full refresh"""
@@ -5418,7 +5343,7 @@ class RunParameters:
      `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
      of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-     Use [Task parameter variables] to set parameters containing information about job runs.
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

      Important

@@ -5426,7 +5351,7 @@ class RunParameters:
      returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
      emojis.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      spark_submit_params: Optional[List[str]] = None
      """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
@@ -5435,7 +5360,7 @@ class RunParameters:
      parameters specified in job setting. The JSON representation of this field (for example
      `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-     Use [Task parameter variables] to set parameters containing information about job runs
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

      Important

@@ -5443,11 +5368,15 @@ class RunParameters:
      returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
      emojis.

-     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      sql_params: Optional[Dict[str, str]] = None
      """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
-     doe", "age": "35"}`. The SQL alert task does not support custom parameters."""
+     doe", "age": "35"}`. The SQL alert task does not support custom parameters.
+
+     ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+     [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown"""

      def as_dict(self) -> dict:
          """Serializes the RunParameters into a dictionary suitable for use as a JSON request body."""
@@ -8037,9 +7966,6 @@ class TriggerSettings:
      periodic: Optional[PeriodicTriggerConfiguration] = None
      """Periodic trigger settings."""

-     table: Optional[TableUpdateTriggerConfiguration] = None
-     """Old table trigger settings name. Deprecated in favor of `table_update`."""
-
      table_update: Optional[TableUpdateTriggerConfiguration] = None

      def as_dict(self) -> dict:
@@ -8051,8 +7977,6 @@ class TriggerSettings:
              body["pause_status"] = self.pause_status.value
          if self.periodic:
              body["periodic"] = self.periodic.as_dict()
-         if self.table:
-             body["table"] = self.table.as_dict()
          if self.table_update:
              body["table_update"] = self.table_update.as_dict()
          return body
@@ -8066,8 +7990,6 @@ class TriggerSettings:
              body["pause_status"] = self.pause_status
          if self.periodic:
              body["periodic"] = self.periodic
-         if self.table:
-             body["table"] = self.table
          if self.table_update:
              body["table_update"] = self.table_update
          return body
@@ -8079,7 +8001,6 @@ class TriggerSettings:
              file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration),
              pause_status=_enum(d, "pause_status", PauseStatus),
              periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration),
-             table=_from_dict(d, "table", TableUpdateTriggerConfiguration),
              table_update=_from_dict(d, "table_update", TableUpdateTriggerConfiguration),
          )

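With the legacy `table` field gone from `TriggerSettings`, table-arrival triggers must be expressed through `table_update` alone; serialized bodies no longer contain a `table` key. A hedged sketch; the `table_names` and `min_time_between_triggers_seconds` fields on `TableUpdateTriggerConfiguration` are assumed from the current API surface, not shown in this hunk:

```python
from databricks.sdk.service import jobs

trigger = jobs.TriggerSettings(
    pause_status=jobs.PauseStatus.UNPAUSED,
    table_update=jobs.TableUpdateTriggerConfiguration(
        table_names=["main.sales.orders"],      # assumed field: tables whose updates fire the trigger
        min_time_between_triggers_seconds=300,  # assumed field: minimum gap between triggered runs
    ),
)

# as_dict() reflects the removal: only "pause_status" and "table_update" keys are emitted.
print(trigger.as_dict())
```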
@@ -8140,24 +8061,6 @@ class TriggerType(Enum):
      TABLE = "TABLE"


- @dataclass
- class UpdateResponse:
-     def as_dict(self) -> dict:
-         """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse:
-         """Deserializes the UpdateResponse from a dictionary."""
-         return cls()
-
-
  @dataclass
  class ViewItem:
      content: Optional[str] = None
@@ -8399,6 +8302,7 @@ class JobsAPI:


          """
+
          body = {}
          if all_queued_runs is not None:
              body["all_queued_runs"] = all_queued_runs
@@ -8421,6 +8325,7 @@ class JobsAPI:
            Long-running operation waiter for :class:`Run`.
            See :method:wait_get_run_job_terminated_or_skipped for more details.
          """
+
          body = {}
          if run_id is not None:
              body["run_id"] = run_id
@@ -8565,6 +8470,7 @@ class JobsAPI:

          :returns: :class:`CreateResponse`
          """
+
          body = {}
          if access_control_list is not None:
              body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -8634,6 +8540,7 @@ class JobsAPI:


          """
+
          body = {}
          if job_id is not None:
              body["job_id"] = job_id
@@ -8651,6 +8558,7 @@ class JobsAPI:


          """
+
          body = {}
          if run_id is not None:
              body["run_id"] = run_id
@@ -8981,6 +8889,10 @@ class JobsAPI:
          :param dbt_commands: List[str] (optional)
            An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
            deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param jar_params: List[str] (optional)
            A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
            The parameters are used to invoke the main function of the main class specified in the Spark JAR
@@ -8988,9 +8900,9 @@ class JobsAPI:
            in conjunction with notebook_params. The JSON representation of this field (for example
            `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-           Use [Task parameter variables] to set parameters containing information about job runs.
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param job_parameters: Dict[str,str] (optional)
            Job-level parameters used in the run. for example `"param": "overriding_val"`
          :param latest_repair_id: int (optional)
@@ -9005,13 +8917,13 @@ class JobsAPI:

            notebook_params cannot be specified in conjunction with jar_params.

-           Use [Task parameter variables] to set parameters containing information about job runs.
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

            The JSON representation of this field (for example `{"notebook_params":{"name":"john
            doe","age":"35"}}`) cannot exceed 10,000 bytes.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
            [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param performance_target: :class:`PerformanceTarget` (optional)
            The performance mode on a serverless job. The performance target determines the level of compute
            performance or cost-efficiency for the run. This field overrides the performance target defined on
@@ -9029,7 +8941,7 @@ class JobsAPI:
            would overwrite the parameters specified in job setting. The JSON representation of this field (for
            example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-           Use [Task parameter variables] to set parameters containing information about job runs.
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

            Important

@@ -9037,7 +8949,7 @@ class JobsAPI:
            returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
            emojis.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param rerun_all_failed_tasks: bool (optional)
            If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
          :param rerun_dependent_tasks: bool (optional)
@@ -9052,7 +8964,7 @@ class JobsAPI:
            in job setting. The JSON representation of this field (for example `{"python_params":["john
            doe","35"]}`) cannot exceed 10,000 bytes.

-           Use [Task parameter variables] to set parameters containing information about job runs
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

            Important

@@ -9060,15 +8972,20 @@ class JobsAPI:
            returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
            emojis.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param sql_params: Dict[str,str] (optional)
            A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
            "age": "35"}`. The SQL alert task does not support custom parameters.

+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
+
          :returns:
            Long-running operation waiter for :class:`Run`.
            See :method:wait_get_run_job_terminated_or_skipped for more details.
          """
+
          body = {}
          if dbt_commands is not None:
              body["dbt_commands"] = [v for v in dbt_commands]
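The same guidance applies to `repair_run`: pass `job_parameters` instead of the deprecated per-task-type arguments when re-running failed tasks. A minimal sketch, assuming an existing failed run:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Repair the failed tasks of run 5678, overriding job-level parameters for the repair.
repaired = w.jobs.repair_run(
    run_id=5678,  # placeholder run ID
    rerun_all_failed_tasks=True,
    job_parameters={"env": "staging"},
).result()  # block until the repaired run terminates or is skipped

print(repaired.state)
```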
@@ -9164,6 +9081,7 @@ class JobsAPI:


          """
+
          body = {}
          if job_id is not None:
              body["job_id"] = job_id
@@ -9200,6 +9118,10 @@ class JobsAPI:
          :param dbt_commands: List[str] (optional)
            An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
            deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param idempotency_token: str (optional)
            An optional token to guarantee the idempotency of job run requests. If a run with the provided token
            already exists, the request does not create a new run but returns the ID of the existing run
@@ -9220,9 +9142,9 @@ class JobsAPI:
            in conjunction with notebook_params. The JSON representation of this field (for example
            `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-           Use [Task parameter variables] to set parameters containing information about job runs.
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param job_parameters: Dict[str,str] (optional)
            Job-level parameters used in the run. for example `"param": "overriding_val"`
          :param notebook_params: Dict[str,str] (optional)
@@ -9234,13 +9156,13 @@ class JobsAPI:

            notebook_params cannot be specified in conjunction with jar_params.

-           Use [Task parameter variables] to set parameters containing information about job runs.
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

            The JSON representation of this field (for example `{"notebook_params":{"name":"john
            doe","age":"35"}}`) cannot exceed 10,000 bytes.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
            [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param only: List[str] (optional)
            A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
            will be run.
@@ -9261,7 +9183,7 @@ class JobsAPI:
            would overwrite the parameters specified in job setting. The JSON representation of this field (for
            example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.

-           Use [Task parameter variables] to set parameters containing information about job runs.
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

            Important

@@ -9269,7 +9191,7 @@ class JobsAPI:
            returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
            emojis.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param queue: :class:`QueueSettings` (optional)
            The queue settings of the run.
          :param spark_submit_params: List[str] (optional)
@@ -9279,7 +9201,7 @@ class JobsAPI:
            in job setting. The JSON representation of this field (for example `{"python_params":["john
            doe","35"]}`) cannot exceed 10,000 bytes.

-           Use [Task parameter variables] to set parameters containing information about job runs
+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.

            Important

@@ -9287,15 +9209,20 @@ class JobsAPI:
            returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
            emojis.

-           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
          :param sql_params: Dict[str,str] (optional)
            A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
            "age": "35"}`. The SQL alert task does not support custom parameters.

+           ⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+
+           [job parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
+
          :returns:
            Long-running operation waiter for :class:`Run`.
            See :method:wait_get_run_job_terminated_or_skipped for more details.
          """
+
          body = {}
          if dbt_commands is not None:
              body["dbt_commands"] = [v for v in dbt_commands]
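Likewise for `run_now`, the recommended call passes `job_parameters` rather than `notebook_params`, `python_params`, and the other deprecated fields. A minimal sketch:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Trigger job 1234; tasks resolve the values via {{job.parameters.<name>}}.
run = w.jobs.run_now(
    job_id=1234,  # placeholder job ID
    job_parameters={"name": "john doe", "age": "35"},
).result()

print(run.run_page_url)
```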
@@ -9385,6 +9312,7 @@ class JobsAPI:

          :returns: :class:`JobPermissions`
          """
+
          body = {}
          if access_control_list is not None:
              body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -9474,6 +9402,7 @@ class JobsAPI:
            Long-running operation waiter for :class:`Run`.
            See :method:wait_get_run_job_terminated_or_skipped for more details.
          """
+
          body = {}
          if access_control_list is not None:
              body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -9580,6 +9509,7 @@ class JobsAPI:


          """
+
          body = {}
          if fields_to_remove is not None:
              body["fields_to_remove"] = [v for v in fields_to_remove]
@@ -9604,6 +9534,7 @@ class JobsAPI:

          :returns: :class:`JobPermissions`
          """
+
          body = {}
          if access_control_list is not None:
              body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -9645,6 +9576,7 @@ class PolicyComplianceForJobsAPI:

          :returns: :class:`EnforcePolicyComplianceResponse`
          """
+
          body = {}
          if job_id is not None:
              body["job_id"] = job_id