databricks-sdk 0.55.0__py3-none-any.whl → 0.57.0__py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

This version of databricks-sdk might be problematic.

Files changed (31)
  1. databricks/sdk/__init__.py +41 -24
  2. databricks/sdk/service/aibuilder.py +505 -0
  3. databricks/sdk/service/apps.py +14 -42
  4. databricks/sdk/service/billing.py +167 -220
  5. databricks/sdk/service/catalog.py +462 -1235
  6. databricks/sdk/service/cleanrooms.py +26 -43
  7. databricks/sdk/service/compute.py +75 -211
  8. databricks/sdk/service/dashboards.py +77 -511
  9. databricks/sdk/service/database.py +1271 -0
  10. databricks/sdk/service/files.py +20 -54
  11. databricks/sdk/service/iam.py +61 -171
  12. databricks/sdk/service/jobs.py +453 -68
  13. databricks/sdk/service/marketplace.py +46 -146
  14. databricks/sdk/service/ml.py +453 -477
  15. databricks/sdk/service/oauth2.py +17 -45
  16. databricks/sdk/service/pipelines.py +125 -40
  17. databricks/sdk/service/provisioning.py +30 -93
  18. databricks/sdk/service/qualitymonitorv2.py +265 -0
  19. databricks/sdk/service/serving.py +106 -46
  20. databricks/sdk/service/settings.py +1062 -390
  21. databricks/sdk/service/sharing.py +33 -88
  22. databricks/sdk/service/sql.py +292 -185
  23. databricks/sdk/service/vectorsearch.py +13 -43
  24. databricks/sdk/service/workspace.py +35 -105
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/RECORD +31 -28
  28. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/top_level.txt +0 -0
@@ -54,6 +54,9 @@ class BaseJob:
  """Settings for this job and all of its runs. These settings can be updated using the `resetJob`
  method."""

+ trigger_state: Optional[TriggerStateProto] = None
+ """State of the trigger associated with the job."""
+
  def as_dict(self) -> dict:
  """Serializes the BaseJob into a dictionary suitable for use as a JSON request body."""
  body = {}
@@ -69,6 +72,8 @@ class BaseJob:
  body["job_id"] = self.job_id
  if self.settings:
  body["settings"] = self.settings.as_dict()
+ if self.trigger_state:
+ body["trigger_state"] = self.trigger_state.as_dict()
  return body

  def as_shallow_dict(self) -> dict:
@@ -86,6 +91,8 @@ class BaseJob:
  body["job_id"] = self.job_id
  if self.settings:
  body["settings"] = self.settings
+ if self.trigger_state:
+ body["trigger_state"] = self.trigger_state
  return body

  @classmethod
@@ -98,6 +105,7 @@ class BaseJob:
  has_more=d.get("has_more", None),
  job_id=d.get("job_id", None),
  settings=_from_dict(d, "settings", JobSettings),
+ trigger_state=_from_dict(d, "trigger_state", TriggerStateProto),
  )

@@ -1389,6 +1397,142 @@ class DashboardTaskOutput:
  return cls(page_snapshots=_repeated_dict(d, "page_snapshots", DashboardPageSnapshot))


+ @dataclass
+ class DbtCloudJobRunStep:
+ """Format of response retrieved from dbt Cloud, for inclusion in output Deprecated in favor of
+ DbtPlatformJobRunStep"""
+
+ index: Optional[int] = None
+ """Orders the steps in the job"""
+
+ logs: Optional[str] = None
+ """Output of the step"""
+
+ name: Optional[str] = None
+ """Name of the step in the job"""
+
+ status: Optional[DbtPlatformRunStatus] = None
+ """State of the step"""
+
+ def as_dict(self) -> dict:
+ """Serializes the DbtCloudJobRunStep into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.index is not None:
+ body["index"] = self.index
+ if self.logs is not None:
+ body["logs"] = self.logs
+ if self.name is not None:
+ body["name"] = self.name
+ if self.status is not None:
+ body["status"] = self.status.value
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DbtCloudJobRunStep into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.index is not None:
+ body["index"] = self.index
+ if self.logs is not None:
+ body["logs"] = self.logs
+ if self.name is not None:
+ body["name"] = self.name
+ if self.status is not None:
+ body["status"] = self.status
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DbtCloudJobRunStep:
+ """Deserializes the DbtCloudJobRunStep from a dictionary."""
+ return cls(
+ index=d.get("index", None),
+ logs=d.get("logs", None),
+ name=d.get("name", None),
+ status=_enum(d, "status", DbtPlatformRunStatus),
+ )
+
+
+ @dataclass
+ class DbtCloudTask:
+ """Deprecated in favor of DbtPlatformTask"""
+
+ connection_resource_name: Optional[str] = None
+ """The resource name of the UC connection that authenticates the dbt Cloud for this task"""
+
+ dbt_cloud_job_id: Optional[int] = None
+ """Id of the dbt Cloud job to be triggered"""
+
+ def as_dict(self) -> dict:
+ """Serializes the DbtCloudTask into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.connection_resource_name is not None:
+ body["connection_resource_name"] = self.connection_resource_name
+ if self.dbt_cloud_job_id is not None:
+ body["dbt_cloud_job_id"] = self.dbt_cloud_job_id
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DbtCloudTask into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.connection_resource_name is not None:
+ body["connection_resource_name"] = self.connection_resource_name
+ if self.dbt_cloud_job_id is not None:
+ body["dbt_cloud_job_id"] = self.dbt_cloud_job_id
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTask:
+ """Deserializes the DbtCloudTask from a dictionary."""
+ return cls(
+ connection_resource_name=d.get("connection_resource_name", None),
+ dbt_cloud_job_id=d.get("dbt_cloud_job_id", None),
+ )
+
+
+ @dataclass
+ class DbtCloudTaskOutput:
+ """Deprecated in favor of DbtPlatformTaskOutput"""
+
+ dbt_cloud_job_run_id: Optional[int] = None
+ """Id of the job run in dbt Cloud"""
+
+ dbt_cloud_job_run_output: Optional[List[DbtCloudJobRunStep]] = None
+ """Steps of the job run as received from dbt Cloud"""
+
+ dbt_cloud_job_run_url: Optional[str] = None
+ """Url where full run details can be viewed"""
+
+ def as_dict(self) -> dict:
+ """Serializes the DbtCloudTaskOutput into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.dbt_cloud_job_run_id is not None:
+ body["dbt_cloud_job_run_id"] = self.dbt_cloud_job_run_id
+ if self.dbt_cloud_job_run_output:
+ body["dbt_cloud_job_run_output"] = [v.as_dict() for v in self.dbt_cloud_job_run_output]
+ if self.dbt_cloud_job_run_url is not None:
+ body["dbt_cloud_job_run_url"] = self.dbt_cloud_job_run_url
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DbtCloudTaskOutput into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.dbt_cloud_job_run_id is not None:
+ body["dbt_cloud_job_run_id"] = self.dbt_cloud_job_run_id
+ if self.dbt_cloud_job_run_output:
+ body["dbt_cloud_job_run_output"] = self.dbt_cloud_job_run_output
+ if self.dbt_cloud_job_run_url is not None:
+ body["dbt_cloud_job_run_url"] = self.dbt_cloud_job_run_url
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTaskOutput:
+ """Deserializes the DbtCloudTaskOutput from a dictionary."""
+ return cls(
+ dbt_cloud_job_run_id=d.get("dbt_cloud_job_run_id", None),
+ dbt_cloud_job_run_output=_repeated_dict(d, "dbt_cloud_job_run_output", DbtCloudJobRunStep),
+ dbt_cloud_job_run_url=d.get("dbt_cloud_job_run_url", None),
+ )
+
+
  @dataclass
  class DbtOutput:
  artifacts_headers: Optional[Dict[str, str]] = None
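
Note: the DbtCloudJobRunStep, DbtCloudTask and DbtCloudTaskOutput classes above are carried only for backward compatibility; their docstrings mark them as deprecated in favor of the DbtPlatform* classes added further down. A minimal serialization round-trip sketch, assuming the 0.57.0 import path and placeholder identifiers:

    from databricks.sdk.service.jobs import DbtCloudTask

    # Deprecated shape; prefer DbtPlatformTask in new code.
    task = DbtCloudTask(connection_resource_name="my_uc_connection", dbt_cloud_job_id=123)
    payload = task.as_dict()  # {'connection_resource_name': 'my_uc_connection', 'dbt_cloud_job_id': 123}
    assert DbtCloudTask.from_dict(payload) == task  # dataclass equality survives the round trip
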
@@ -1422,6 +1566,176 @@ class DbtOutput:
  return cls(artifacts_headers=d.get("artifacts_headers", None), artifacts_link=d.get("artifacts_link", None))


+ @dataclass
+ class DbtPlatformJobRunStep:
+ """Format of response retrieved from dbt platform, for inclusion in output"""
+
+ index: Optional[int] = None
+ """Orders the steps in the job"""
+
+ logs: Optional[str] = None
+ """Output of the step"""
+
+ logs_truncated: Optional[bool] = None
+ """Whether the logs of this step have been truncated. If true, the logs has been truncated to 10000
+ characters."""
+
+ name: Optional[str] = None
+ """Name of the step in the job"""
+
+ name_truncated: Optional[bool] = None
+ """Whether the name of the job has been truncated. If true, the name has been truncated to 100
+ characters."""
+
+ status: Optional[DbtPlatformRunStatus] = None
+ """State of the step"""
+
+ def as_dict(self) -> dict:
+ """Serializes the DbtPlatformJobRunStep into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.index is not None:
+ body["index"] = self.index
+ if self.logs is not None:
+ body["logs"] = self.logs
+ if self.logs_truncated is not None:
+ body["logs_truncated"] = self.logs_truncated
+ if self.name is not None:
+ body["name"] = self.name
+ if self.name_truncated is not None:
+ body["name_truncated"] = self.name_truncated
+ if self.status is not None:
+ body["status"] = self.status.value
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DbtPlatformJobRunStep into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.index is not None:
+ body["index"] = self.index
+ if self.logs is not None:
+ body["logs"] = self.logs
+ if self.logs_truncated is not None:
+ body["logs_truncated"] = self.logs_truncated
+ if self.name is not None:
+ body["name"] = self.name
+ if self.name_truncated is not None:
+ body["name_truncated"] = self.name_truncated
+ if self.status is not None:
+ body["status"] = self.status
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DbtPlatformJobRunStep:
+ """Deserializes the DbtPlatformJobRunStep from a dictionary."""
+ return cls(
+ index=d.get("index", None),
+ logs=d.get("logs", None),
+ logs_truncated=d.get("logs_truncated", None),
+ name=d.get("name", None),
+ name_truncated=d.get("name_truncated", None),
+ status=_enum(d, "status", DbtPlatformRunStatus),
+ )
+
+
+ class DbtPlatformRunStatus(Enum):
+ """Response enumeration from calling the dbt platform API, for inclusion in output"""
+
+ CANCELLED = "CANCELLED"
+ ERROR = "ERROR"
+ QUEUED = "QUEUED"
+ RUNNING = "RUNNING"
+ STARTING = "STARTING"
+ SUCCESS = "SUCCESS"
+
+
+ @dataclass
+ class DbtPlatformTask:
+ connection_resource_name: Optional[str] = None
+ """The resource name of the UC connection that authenticates the dbt platform for this task"""
+
+ dbt_platform_job_id: Optional[str] = None
+ """Id of the dbt platform job to be triggered. Specified as a string for maximum compatibility with
+ clients."""
+
+ def as_dict(self) -> dict:
+ """Serializes the DbtPlatformTask into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.connection_resource_name is not None:
+ body["connection_resource_name"] = self.connection_resource_name
+ if self.dbt_platform_job_id is not None:
+ body["dbt_platform_job_id"] = self.dbt_platform_job_id
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DbtPlatformTask into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.connection_resource_name is not None:
+ body["connection_resource_name"] = self.connection_resource_name
+ if self.dbt_platform_job_id is not None:
+ body["dbt_platform_job_id"] = self.dbt_platform_job_id
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DbtPlatformTask:
+ """Deserializes the DbtPlatformTask from a dictionary."""
+ return cls(
+ connection_resource_name=d.get("connection_resource_name", None),
+ dbt_platform_job_id=d.get("dbt_platform_job_id", None),
+ )
+
+
+ @dataclass
+ class DbtPlatformTaskOutput:
+ dbt_platform_job_run_id: Optional[str] = None
+ """Id of the job run in dbt platform. Specified as a string for maximum compatibility with clients."""
+
+ dbt_platform_job_run_output: Optional[List[DbtPlatformJobRunStep]] = None
+ """Steps of the job run as received from dbt platform"""
+
+ dbt_platform_job_run_url: Optional[str] = None
+ """Url where full run details can be viewed"""
+
+ steps_truncated: Optional[bool] = None
+ """Whether the number of steps in the output has been truncated. If true, the output will contain
+ the first 20 steps of the output."""
+
+ def as_dict(self) -> dict:
+ """Serializes the DbtPlatformTaskOutput into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.dbt_platform_job_run_id is not None:
+ body["dbt_platform_job_run_id"] = self.dbt_platform_job_run_id
+ if self.dbt_platform_job_run_output:
+ body["dbt_platform_job_run_output"] = [v.as_dict() for v in self.dbt_platform_job_run_output]
+ if self.dbt_platform_job_run_url is not None:
+ body["dbt_platform_job_run_url"] = self.dbt_platform_job_run_url
+ if self.steps_truncated is not None:
+ body["steps_truncated"] = self.steps_truncated
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DbtPlatformTaskOutput into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.dbt_platform_job_run_id is not None:
+ body["dbt_platform_job_run_id"] = self.dbt_platform_job_run_id
+ if self.dbt_platform_job_run_output:
+ body["dbt_platform_job_run_output"] = self.dbt_platform_job_run_output
+ if self.dbt_platform_job_run_url is not None:
+ body["dbt_platform_job_run_url"] = self.dbt_platform_job_run_url
+ if self.steps_truncated is not None:
+ body["steps_truncated"] = self.steps_truncated
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DbtPlatformTaskOutput:
+ """Deserializes the DbtPlatformTaskOutput from a dictionary."""
+ return cls(
+ dbt_platform_job_run_id=d.get("dbt_platform_job_run_id", None),
+ dbt_platform_job_run_output=_repeated_dict(d, "dbt_platform_job_run_output", DbtPlatformJobRunStep),
+ dbt_platform_job_run_url=d.get("dbt_platform_job_run_url", None),
+ steps_truncated=d.get("steps_truncated", None),
+ )
+
+
  @dataclass
  class DbtTask:
  commands: List[str]
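
Note: the DbtPlatform* classes are the replacements for the deprecated DbtCloud* classes above. A hedged sketch of constructing a task and parsing an output payload; the connection name, job id and step values are placeholders:

    from databricks.sdk.service.jobs import (DbtPlatformRunStatus, DbtPlatformTask, DbtPlatformTaskOutput)

    # dbt_platform_job_id is a string for maximum client compatibility (see the field docstring above).
    task = DbtPlatformTask(connection_resource_name="my_uc_connection", dbt_platform_job_id="70403103936912")

    # from_dict maps the "status" string onto the DbtPlatformRunStatus enum.
    out = DbtPlatformTaskOutput.from_dict({
        "dbt_platform_job_run_id": "42",
        "dbt_platform_job_run_output": [{"index": 1, "name": "dbt run", "status": "SUCCESS"}],
        "steps_truncated": False,
    })
    assert out.dbt_platform_job_run_output[0].status is DbtPlatformRunStatus.SUCCESS
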
@@ -1804,6 +2118,31 @@ class FileArrivalTriggerConfiguration:
  )


+ @dataclass
+ class FileArrivalTriggerState:
+ using_file_events: Optional[bool] = None
+ """Indicates whether the trigger leverages file events to detect file arrivals."""
+
+ def as_dict(self) -> dict:
+ """Serializes the FileArrivalTriggerState into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.using_file_events is not None:
+ body["using_file_events"] = self.using_file_events
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the FileArrivalTriggerState into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.using_file_events is not None:
+ body["using_file_events"] = self.using_file_events
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> FileArrivalTriggerState:
+ """Deserializes the FileArrivalTriggerState from a dictionary."""
+ return cls(using_file_events=d.get("using_file_events", None))
+
+
  @dataclass
  class ForEachStats:
  error_message_stats: Optional[List[ForEachTaskErrorMessageStats]] = None
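
Note: FileArrivalTriggerState is a small wrapper around a single flag. A quick round-trip sketch:

    from databricks.sdk.service.jobs import FileArrivalTriggerState

    state = FileArrivalTriggerState.from_dict({"using_file_events": True})
    assert state.using_file_events is True
    assert state.as_dict() == {"using_file_events": True}
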
@@ -2321,6 +2660,9 @@ class Job:
  """Settings for this job and all of its runs. These settings can be updated using the `resetJob`
  method."""

+ trigger_state: Optional[TriggerStateProto] = None
+ """State of the trigger associated with the job."""
+
  def as_dict(self) -> dict:
  """Serializes the Job into a dictionary suitable for use as a JSON request body."""
  body = {}
@@ -2340,6 +2682,8 @@ class Job:
  body["run_as_user_name"] = self.run_as_user_name
  if self.settings:
  body["settings"] = self.settings.as_dict()
+ if self.trigger_state:
+ body["trigger_state"] = self.trigger_state.as_dict()
  return body

  def as_shallow_dict(self) -> dict:
@@ -2361,6 +2705,8 @@ class Job:
  body["run_as_user_name"] = self.run_as_user_name
  if self.settings:
  body["settings"] = self.settings
+ if self.trigger_state:
+ body["trigger_state"] = self.trigger_state
  return body

  @classmethod
@@ -2375,6 +2721,7 @@ class Job:
  next_page_token=d.get("next_page_token", None),
  run_as_user_name=d.get("run_as_user_name", None),
  settings=_from_dict(d, "settings", JobSettings),
+ trigger_state=_from_dict(d, "trigger_state", TriggerStateProto),
  )

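Note: a hedged sketch of reading the new trigger_state field off a fetched job. It assumes a WorkspaceClient configured from the environment and an existing job id (123456 is a placeholder); both nested fields are optional and may be None:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # assumes credentials are configured in the environment
    job = w.jobs.get(job_id=123456)  # placeholder job id
    if job.trigger_state and job.trigger_state.file_arrival:
        print("file-arrival trigger uses file events:",
              job.trigger_state.file_arrival.using_file_events)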
 
@@ -5771,9 +6118,14 @@ class RunOutput:
  dashboard_output: Optional[DashboardTaskOutput] = None
  """The output of a dashboard task, if available"""

+ dbt_cloud_output: Optional[DbtCloudTaskOutput] = None
+ """Deprecated in favor of the new dbt_platform_output"""
+
  dbt_output: Optional[DbtOutput] = None
  """The output of a dbt task, if available."""

+ dbt_platform_output: Optional[DbtPlatformTaskOutput] = None
+
  error: Optional[str] = None
  """An error message indicating why a task failed or why output is not available. The message is
  unstructured, and its exact format is subject to change."""
@@ -5819,8 +6171,12 @@ class RunOutput:
  body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output.as_dict()
  if self.dashboard_output:
  body["dashboard_output"] = self.dashboard_output.as_dict()
+ if self.dbt_cloud_output:
+ body["dbt_cloud_output"] = self.dbt_cloud_output.as_dict()
  if self.dbt_output:
  body["dbt_output"] = self.dbt_output.as_dict()
+ if self.dbt_platform_output:
+ body["dbt_platform_output"] = self.dbt_platform_output.as_dict()
  if self.error is not None:
  body["error"] = self.error
  if self.error_trace is not None:
@@ -5848,8 +6204,12 @@ class RunOutput:
  body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output
  if self.dashboard_output:
  body["dashboard_output"] = self.dashboard_output
+ if self.dbt_cloud_output:
+ body["dbt_cloud_output"] = self.dbt_cloud_output
  if self.dbt_output:
  body["dbt_output"] = self.dbt_output
+ if self.dbt_platform_output:
+ body["dbt_platform_output"] = self.dbt_platform_output
  if self.error is not None:
  body["error"] = self.error
  if self.error_trace is not None:
@@ -5878,7 +6238,9 @@ class RunOutput:
  d, "clean_rooms_notebook_output", CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput
  ),
  dashboard_output=_from_dict(d, "dashboard_output", DashboardTaskOutput),
+ dbt_cloud_output=_from_dict(d, "dbt_cloud_output", DbtCloudTaskOutput),
  dbt_output=_from_dict(d, "dbt_output", DbtOutput),
+ dbt_platform_output=_from_dict(d, "dbt_platform_output", DbtPlatformTaskOutput),
  error=d.get("error", None),
  error_trace=d.get("error_trace", None),
  info=d.get("info", None),
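
Note: a hedged sketch of reading the new dbt outputs from a task run's output. The run id is a placeholder, and the fields are only populated for runs that actually executed a dbt platform (or legacy dbt Cloud) task:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    output = w.jobs.get_run_output(run_id=987654)  # placeholder task-run id
    if output.dbt_platform_output:
        for step in output.dbt_platform_output.dbt_platform_job_run_output or []:
            print(step.index, step.name, step.status)
    elif output.dbt_cloud_output:  # deprecated counterpart of dbt_platform_output
        print(output.dbt_cloud_output.dbt_cloud_job_run_url)
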
@@ -6197,6 +6559,11 @@ class RunTask:
  dashboard_task: Optional[DashboardTask] = None
  """The task refreshes a dashboard and sends a snapshot to subscribers."""

+ dbt_cloud_task: Optional[DbtCloudTask] = None
+ """Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task"""
+
+ dbt_platform_task: Optional[DbtPlatformTask] = None
+
  dbt_task: Optional[DbtTask] = None
  """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
  requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
@@ -6377,6 +6744,10 @@ class RunTask:
  body["condition_task"] = self.condition_task.as_dict()
  if self.dashboard_task:
  body["dashboard_task"] = self.dashboard_task.as_dict()
+ if self.dbt_cloud_task:
+ body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict()
+ if self.dbt_platform_task:
+ body["dbt_platform_task"] = self.dbt_platform_task.as_dict()
  if self.dbt_task:
  body["dbt_task"] = self.dbt_task.as_dict()
  if self.depends_on:
@@ -6472,6 +6843,10 @@ class RunTask:
  body["condition_task"] = self.condition_task
  if self.dashboard_task:
  body["dashboard_task"] = self.dashboard_task
+ if self.dbt_cloud_task:
+ body["dbt_cloud_task"] = self.dbt_cloud_task
+ if self.dbt_platform_task:
+ body["dbt_platform_task"] = self.dbt_platform_task
  if self.dbt_task:
  body["dbt_task"] = self.dbt_task
  if self.depends_on:
@@ -6562,6 +6937,8 @@ class RunTask:
  cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance),
  condition_task=_from_dict(d, "condition_task", RunConditionTask),
  dashboard_task=_from_dict(d, "dashboard_task", DashboardTask),
+ dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask),
+ dbt_platform_task=_from_dict(d, "dbt_platform_task", DbtPlatformTask),
  dbt_task=_from_dict(d, "dbt_task", DbtTask),
  depends_on=_repeated_dict(d, "depends_on", TaskDependency),
  description=d.get("description", None),
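
Note: a hedged sketch of inspecting which dbt task variant a finished run used, based on the new RunTask fields; the run id is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    run = w.jobs.get_run(run_id=987654)  # placeholder run id
    for task in run.tasks or []:
        if task.dbt_platform_task:
            print(task.task_key, "-> dbt platform job", task.dbt_platform_task.dbt_platform_job_id)
        elif task.dbt_cloud_task:
            print(task.task_key, "-> deprecated dbt_cloud_task")
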
@@ -7585,6 +7962,11 @@ class SubmitTask:
  dashboard_task: Optional[DashboardTask] = None
  """The task refreshes a dashboard and sends a snapshot to subscribers."""

+ dbt_cloud_task: Optional[DbtCloudTask] = None
+ """Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task"""
+
+ dbt_platform_task: Optional[DbtPlatformTask] = None
+
  dbt_task: Optional[DbtTask] = None
  """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
  requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
@@ -7695,6 +8077,10 @@ class SubmitTask:
  body["condition_task"] = self.condition_task.as_dict()
  if self.dashboard_task:
  body["dashboard_task"] = self.dashboard_task.as_dict()
+ if self.dbt_cloud_task:
+ body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict()
+ if self.dbt_platform_task:
+ body["dbt_platform_task"] = self.dbt_platform_task.as_dict()
  if self.dbt_task:
  body["dbt_task"] = self.dbt_task.as_dict()
  if self.depends_on:
@@ -7756,6 +8142,10 @@ class SubmitTask:
  body["condition_task"] = self.condition_task
  if self.dashboard_task:
  body["dashboard_task"] = self.dashboard_task
+ if self.dbt_cloud_task:
+ body["dbt_cloud_task"] = self.dbt_cloud_task
+ if self.dbt_platform_task:
+ body["dbt_platform_task"] = self.dbt_platform_task
  if self.dbt_task:
  body["dbt_task"] = self.dbt_task
  if self.depends_on:
@@ -7815,6 +8205,8 @@ class SubmitTask:
  clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask),
  condition_task=_from_dict(d, "condition_task", ConditionTask),
  dashboard_task=_from_dict(d, "dashboard_task", DashboardTask),
+ dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask),
+ dbt_platform_task=_from_dict(d, "dbt_platform_task", DbtPlatformTask),
  dbt_task=_from_dict(d, "dbt_task", DbtTask),
  depends_on=_repeated_dict(d, "depends_on", TaskDependency),
  description=d.get("description", None),
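
Note: a hedged sketch of a one-time run that uses the new dbt_platform_task field on SubmitTask. All identifiers are placeholders, and any cluster, warehouse or environment settings the workload additionally needs are omitted:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    run = w.jobs.submit(
        run_name="one-off dbt platform run",
        tasks=[
            jobs.SubmitTask(
                task_key="dbt_platform",
                dbt_platform_task=jobs.DbtPlatformTask(
                    connection_resource_name="my_uc_connection",  # placeholder UC connection
                    dbt_platform_job_id="70403103936912",  # placeholder dbt platform job id
                ),
            )
        ],
    ).result()
    print(run.run_id)
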
@@ -7995,6 +8387,11 @@ class Task:
  dashboard_task: Optional[DashboardTask] = None
  """The task refreshes a dashboard and sends a snapshot to subscribers."""

+ dbt_cloud_task: Optional[DbtCloudTask] = None
+ """Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task"""
+
+ dbt_platform_task: Optional[DbtPlatformTask] = None
+
  dbt_task: Optional[DbtTask] = None
  """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
  requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
@@ -8130,6 +8527,10 @@ class Task:
  body["condition_task"] = self.condition_task.as_dict()
  if self.dashboard_task:
  body["dashboard_task"] = self.dashboard_task.as_dict()
+ if self.dbt_cloud_task:
+ body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict()
+ if self.dbt_platform_task:
+ body["dbt_platform_task"] = self.dbt_platform_task.as_dict()
  if self.dbt_task:
  body["dbt_task"] = self.dbt_task.as_dict()
  if self.depends_on:
@@ -8201,6 +8602,10 @@ class Task:
  body["condition_task"] = self.condition_task
  if self.dashboard_task:
  body["dashboard_task"] = self.dashboard_task
+ if self.dbt_cloud_task:
+ body["dbt_cloud_task"] = self.dbt_cloud_task
+ if self.dbt_platform_task:
+ body["dbt_platform_task"] = self.dbt_platform_task
  if self.dbt_task:
  body["dbt_task"] = self.dbt_task
  if self.depends_on:
@@ -8270,6 +8675,8 @@ class Task:
  clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask),
  condition_task=_from_dict(d, "condition_task", ConditionTask),
  dashboard_task=_from_dict(d, "dashboard_task", DashboardTask),
+ dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask),
+ dbt_platform_task=_from_dict(d, "dbt_platform_task", DbtPlatformTask),
  dbt_task=_from_dict(d, "dbt_task", DbtTask),
  depends_on=_repeated_dict(d, "depends_on", TaskDependency),
  description=d.get("description", None),
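
Note: a hedged sketch of creating a job whose single task triggers a dbt platform job via the new Task.dbt_platform_task field; names and ids are placeholders, and scheduling or compute settings are omitted:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    created = w.jobs.create(
        name="nightly-dbt-platform",
        tasks=[
            jobs.Task(
                task_key="trigger_dbt_platform",
                dbt_platform_task=jobs.DbtPlatformTask(
                    connection_resource_name="my_uc_connection",  # placeholder
                    dbt_platform_job_id="70403103936912",  # placeholder
                ),
            )
        ],
    )
    print(created.job_id)
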
@@ -8715,6 +9122,30 @@ class TriggerSettings:
  )


+ @dataclass
+ class TriggerStateProto:
+ file_arrival: Optional[FileArrivalTriggerState] = None
+
+ def as_dict(self) -> dict:
+ """Serializes the TriggerStateProto into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.file_arrival:
+ body["file_arrival"] = self.file_arrival.as_dict()
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the TriggerStateProto into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.file_arrival:
+ body["file_arrival"] = self.file_arrival
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> TriggerStateProto:
+ """Deserializes the TriggerStateProto from a dictionary."""
+ return cls(file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerState))
+
+
  class TriggerType(Enum):
  """The type of trigger that fired this run.
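Note: TriggerStateProto simply nests a FileArrivalTriggerState, and from_dict builds the nested object, as a short sketch shows:

    from databricks.sdk.service.jobs import TriggerStateProto

    state = TriggerStateProto.from_dict({"file_arrival": {"using_file_events": True}})
    assert state.file_arrival is not None
    assert state.file_arrival.using_file_events is True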
 
@@ -9036,9 +9467,7 @@ class JobsAPI:
  raise TimeoutError(f"timed out after {timeout}: {status_message}")

  def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None):
- """Cancel all runs of a job.
-
- Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs
+ """Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs
  from being started.

  :param all_queued_runs: bool (optional)
@@ -9061,9 +9490,7 @@ class JobsAPI:
  self._api.do("POST", "/api/2.2/jobs/runs/cancel-all", body=body, headers=headers)

  def cancel_run(self, run_id: int) -> Wait[Run]:
- """Cancel a run.
-
- Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
+ """Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
  this request completes.

  :param run_id: int
@@ -9121,8 +9548,6 @@ class JobsAPI:
  ) -> CreateResponse:
  """Create a new job.

- Create a new job.
-
  :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
  List of permissions to set on the job.
  :param budget_policy_id: str (optional)
@@ -9280,9 +9705,7 @@ class JobsAPI:
  return CreateResponse.from_dict(res)

  def delete(self, job_id: int):
- """Delete a job.
-
- Deletes a job.
+ """Deletes a job.

  :param job_id: int
  The canonical identifier of the job to delete. This field is required.
@@ -9299,9 +9722,7 @@ class JobsAPI:
  self._api.do("POST", "/api/2.2/jobs/delete", body=body, headers=headers)

  def delete_run(self, run_id: int):
- """Delete a job run.
-
- Deletes a non-active run. Returns an error if the run is active.
+ """Deletes a non-active run. Returns an error if the run is active.

  :param run_id: int
  ID of the run to delete.
@@ -9318,9 +9739,7 @@ class JobsAPI:
  self._api.do("POST", "/api/2.2/jobs/runs/delete", body=body, headers=headers)

  def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput:
- """Export and retrieve a job run.
-
- Export and retrieve the job run task.
+ """Export and retrieve the job run task.

  :param run_id: int
  The canonical identifier for the run. This field is required.
@@ -9343,9 +9762,7 @@ class JobsAPI:
  return ExportRunOutput.from_dict(res)

  def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
- """Get a single job.
-
- Retrieves the details for a single job.
+ """Retrieves the details for a single job.

  Large arrays in the results will be paginated when they exceed 100 elements. A request for a single
  job will return all properties for that job, and the first 100 elements of array properties (`tasks`,
@@ -9376,9 +9793,7 @@ class JobsAPI:
  return Job.from_dict(res)

  def get_permission_levels(self, job_id: str) -> GetJobPermissionLevelsResponse:
- """Get job permission levels.
-
- Gets the permission levels that a user can have on an object.
+ """Gets the permission levels that a user can have on an object.

  :param job_id: str
  The job for which to get or manage permissions.
@@ -9394,9 +9809,7 @@ class JobsAPI:
  return GetJobPermissionLevelsResponse.from_dict(res)

  def get_permissions(self, job_id: str) -> JobPermissions:
- """Get job permissions.
-
- Gets the permissions of a job. Jobs can inherit permissions from their root object.
+ """Gets the permissions of a job. Jobs can inherit permissions from their root object.

  :param job_id: str
  The job for which to get or manage permissions.
@@ -9419,9 +9832,7 @@ class JobsAPI:
  include_resolved_values: Optional[bool] = None,
  page_token: Optional[str] = None,
  ) -> Run:
- """Get a single job run.
-
- Retrieves the metadata of a run.
+ """Retrieves the metadata of a run.

  Large arrays in the results will be paginated when they exceed 100 elements. A request for a single
  run will return all properties for that run, and the first 100 elements of array properties (`tasks`,
@@ -9460,9 +9871,7 @@ class JobsAPI:
  return Run.from_dict(res)

  def get_run_output(self, run_id: int) -> RunOutput:
- """Get the output for a single run.
-
- Retrieve the output and metadata of a single task run. When a notebook task returns a value through
+ """Retrieve the output and metadata of a single task run. When a notebook task returns a value through
  the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks
  restricts this API to returning the first 5 MB of the output. To return a larger result, you can store
  job results in a cloud storage service.
@@ -9496,9 +9905,7 @@ class JobsAPI:
  offset: Optional[int] = None,
  page_token: Optional[str] = None,
  ) -> Iterator[BaseJob]:
- """List jobs.
-
- Retrieves a list of jobs.
+ """Retrieves a list of jobs.

  :param expand_tasks: bool (optional)
  Whether to include task and cluster details in the response. Note that only the first 100 elements
@@ -9556,9 +9963,7 @@ class JobsAPI:
  start_time_from: Optional[int] = None,
  start_time_to: Optional[int] = None,
  ) -> Iterator[BaseRun]:
- """List job runs.
-
- List runs in descending order by start time.
+ """List runs in descending order by start time.

  :param active_only: bool (optional)
  If active_only is `true`, only active runs are included in the results; otherwise, lists both active
@@ -9646,9 +10051,7 @@ class JobsAPI:
  spark_submit_params: Optional[List[str]] = None,
  sql_params: Optional[Dict[str, str]] = None,
  ) -> Wait[Run]:
- """Repair a job run.
-
- Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
+ """Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
  and task settings, and can be viewed in the history for the original job run.

  :param run_id: int
@@ -9826,9 +10229,7 @@ class JobsAPI:
  ).result(timeout=timeout)

  def reset(self, job_id: int, new_settings: JobSettings):
- """Update all job settings (reset).
-
- Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
+ """Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
  job settings partially.

  :param job_id: int
@@ -9870,9 +10271,7 @@ class JobsAPI:
  spark_submit_params: Optional[List[str]] = None,
  sql_params: Optional[Dict[str, str]] = None,
  ) -> Wait[Run]:
- """Trigger a new job run.
-
- Run a job and return the `run_id` of the triggered run.
+ """Run a job and return the `run_id` of the triggered run.

  :param job_id: int
  The ID of the job to be executed
@@ -10055,9 +10454,7 @@ class JobsAPI:
  def set_permissions(
  self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None
  ) -> JobPermissions:
- """Set job permissions.
-
- Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
  permissions if none are specified. Objects can inherit permissions from their root object.

  :param job_id: str
@@ -10095,9 +10492,7 @@ class JobsAPI:
  timeout_seconds: Optional[int] = None,
  webhook_notifications: Optional[WebhookNotifications] = None,
  ) -> Wait[Run]:
- """Create and trigger a one-time run.
-
- Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
+ """Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
  Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
  run state after the job is submitted.

@@ -10234,9 +10629,7 @@ class JobsAPI:
  def update(
  self, job_id: int, *, fields_to_remove: Optional[List[str]] = None, new_settings: Optional[JobSettings] = None
  ):
- """Update job settings partially.
-
- Add, update, or remove specific settings of an existing job. Use the [_Reset_
+ """Add, update, or remove specific settings of an existing job. Use the [_Reset_
  endpoint](:method:jobs/reset) to overwrite all job settings.

  :param job_id: int
@@ -10274,9 +10667,7 @@ class JobsAPI:
  def update_permissions(
  self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None
  ) -> JobPermissions:
- """Update job permissions.
-
- Updates the permissions on a job. Jobs can inherit permissions from their root object.
+ """Updates the permissions on a job. Jobs can inherit permissions from their root object.

  :param job_id: str
  The job for which to get or manage permissions.
@@ -10314,9 +10705,7 @@ class PolicyComplianceForJobsAPI:
  def enforce_compliance(
  self, job_id: int, *, validate_only: Optional[bool] = None
  ) -> EnforcePolicyComplianceResponse:
- """Enforce job policy compliance.
-
- Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
+ """Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
  are compliant with the current versions of their respective cluster policies. All-purpose clusters
  used in the job will not be updated.

@@ -10341,9 +10730,7 @@ class PolicyComplianceForJobsAPI:
  return EnforcePolicyComplianceResponse.from_dict(res)

  def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse:
- """Get job policy compliance.
-
- Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
+ """Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
  they use was updated after the job was last edited and some of its job clusters no longer comply with
  their updated policies.

@@ -10366,9 +10753,7 @@ class PolicyComplianceForJobsAPI:
  def list_compliance(
  self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
  ) -> Iterator[JobCompliance]:
- """List job policy compliance.
-
- Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
+ """Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
  compliance if a cluster policy they use was updated after the job was last edited and its job clusters
  no longer comply with the updated policy.
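
Note: the remaining hunks only drop the duplicated first summary line from each docstring; method signatures and behavior are unchanged. A hedged sketch of the paginated listing helpers these docstrings describe, assuming a configured WorkspaceClient and a placeholder job id (the returned iterators follow page_token paging internally):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    for job in w.jobs.list(expand_tasks=False):
        print(job.job_id, job.settings.name if job.settings else None)

    for run in w.jobs.list_runs(job_id=123456, active_only=True):  # placeholder job id
        print(run.run_id, run.state.life_cycle_state if run.state else None)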