databricks-sdk 0.62.0__py3-none-any.whl → 0.63.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +15 -2
- databricks/sdk/service/apps.py +0 -4
- databricks/sdk/service/catalog.py +791 -22
- databricks/sdk/service/cleanrooms.py +21 -14
- databricks/sdk/service/compute.py +14 -0
- databricks/sdk/service/database.py +195 -2
- databricks/sdk/service/jobs.py +27 -0
- databricks/sdk/service/ml.py +3 -4
- databricks/sdk/service/serving.py +22 -0
- databricks/sdk/service/settings.py +36 -1
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.62.0.dist-info → databricks_sdk-0.63.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.62.0.dist-info → databricks_sdk-0.63.0.dist-info}/RECORD +17 -17
- {databricks_sdk-0.62.0.dist-info → databricks_sdk-0.63.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.62.0.dist-info → databricks_sdk-0.63.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.62.0.dist-info → databricks_sdk-0.63.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.62.0.dist-info → databricks_sdk-0.63.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/cleanrooms.py
CHANGED

@@ -142,7 +142,8 @@ class CleanRoomAsset:
     For UC securable assets (tables, volumes, etc.), the format is
     *shared_catalog*.*shared_schema*.*asset_name*

-    For notebooks, the name is the notebook file name."""
+    For notebooks, the name is the notebook file name. For jar analyses, the name is the jar
+    analysis name."""

     asset_type: CleanRoomAssetAssetType
     """The type of the asset."""

@@ -351,7 +352,7 @@ class CleanRoomAssetNotebook:
     """Server generated etag that represents the notebook version."""

     review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """"""
+    """Top-level status derived from all reviews"""

     reviews: Optional[List[CleanRoomNotebookReview]] = None
     """All existing approvals or rejections"""

@@ -546,8 +547,12 @@ class CleanRoomAssetVolumeLocalDetails:
 @dataclass
 class CleanRoomAutoApprovalRule:
     author_collaborator_alias: Optional[str] = None
+    """Collaborator alias of the author covered by the rule. Only one of `author_collaborator_alias`
+    and `author_scope` can be set."""

     author_scope: Optional[CleanRoomAutoApprovalRuleAuthorScope] = None
+    """Scope of authors covered by the rule. Only one of `author_collaborator_alias` and `author_scope`
+    can be set."""

     clean_room_name: Optional[str] = None
     """The name of the clean room this auto-approval rule belongs to."""

@@ -562,6 +567,7 @@ class CleanRoomAutoApprovalRule:
     """The owner of the rule to whom the rule applies."""

     runner_collaborator_alias: Optional[str] = None
+    """Collaborator alias of the runner covered by the rule."""

     def as_dict(self) -> dict:
         """Serializes the CleanRoomAutoApprovalRule into a dictionary suitable for use as a JSON request body."""

@@ -704,19 +710,19 @@ class CleanRoomCollaborator:
 @dataclass
 class CleanRoomNotebookReview:
     comment: Optional[str] = None
-    """"""
+    """Review comment"""

     created_at_millis: Optional[int] = None
-    """"""
+    """When the review was submitted, in epoch milliseconds"""

     review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """"""
+    """Review outcome"""

     review_sub_reason: Optional[CleanRoomNotebookReviewNotebookReviewSubReason] = None
-    """"""
+    """Specified when the review was not explicitly made by a user"""

     reviewer_collaborator_alias: Optional[str] = None
-    """"""
+    """Collaborator alias of the reviewer"""

     def as_dict(self) -> dict:
         """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body."""

@@ -1100,7 +1106,7 @@ class ComplianceSecurityProfile:
 @dataclass
 class CreateCleanRoomAssetReviewResponse:
     notebook_review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """"""
+    """Top-level status derived from all reviews"""

     notebook_reviews: Optional[List[CleanRoomNotebookReview]] = None
     """All existing notebook approvals or rejections"""

@@ -1348,13 +1354,13 @@ class ListCleanRoomsResponse:
 @dataclass
 class NotebookVersionReview:
     etag: str
-    """"""
+    """Etag identifying the notebook version"""

     review_state: CleanRoomNotebookReviewNotebookReviewState
-    """"""
+    """Review outcome"""

     comment: Optional[str] = None
-    """"""
+    """Review comment"""

     def as_dict(self) -> dict:
         """Serializes the NotebookVersionReview into a dictionary suitable for use as a JSON request body."""

@@ -1505,12 +1511,12 @@ class CleanRoomAssetsAPI:
         name: str,
         notebook_review: NotebookVersionReview,
     ) -> CreateCleanRoomAssetReviewResponse:
-        """
+        """Submit an asset review

         :param clean_room_name: str
           Name of the clean room
         :param asset_type: :class:`CleanRoomAssetAssetType`
-
+          Asset type. Can only be NOTEBOOK_FILE.
         :param name: str
           Name of the asset
         :param notebook_review: :class:`NotebookVersionReview`
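Taken together, these hunks add a notebook review workflow to CleanRoomAssetsAPI. A minimal usage sketch; the clean room name, asset name, etag value, and the APPROVED enum member are illustrative assumptions, not taken from this diff:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import cleanrooms

w = WorkspaceClient()  # assumes credentials are configured in the environment

# Approve a specific notebook version; the etag pins the version under review.
resp = w.clean_room_assets.create_clean_room_asset_review(
    clean_room_name="my-clean-room",  # placeholder
    asset_type=cleanrooms.CleanRoomAssetAssetType.NOTEBOOK_FILE,  # only supported type, per the docstring
    name="analysis-notebook",  # placeholder
    notebook_review=cleanrooms.NotebookVersionReview(
        etag="<notebook-version-etag>",  # placeholder
        review_state=cleanrooms.CleanRoomNotebookReviewNotebookReviewState.APPROVED,  # assumed member name
        comment="Looks good",
    ),
)
print(resp.notebook_review_state)  # top-level status derived from all reviews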
@@ -1620,7 +1626,8 @@ class CleanRoomAssetsAPI:
           For UC securable assets (tables, volumes, etc.), the format is
           *shared_catalog*.*shared_schema*.*asset_name*

-          For notebooks, the name is the notebook file name.
+          For notebooks, the name is the notebook file name. For jar analyses, the name is the jar analysis
+          name.
         :param asset: :class:`CleanRoomAsset`
           The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset to
           update.
databricks/sdk/service/compute.py
CHANGED

@@ -3410,6 +3410,15 @@ class GcpAttributes:
     boot_disk_size: Optional[int] = None
     """Boot disk size in GB"""

+    first_on_demand: Optional[int] = None
+    """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This
+    value should be greater than 0, to make sure the cluster driver node is placed on an on-demand
+    instance. If this value is greater than or equal to the current cluster size, all nodes will be
+    placed on on-demand instances. If this value is less than the current cluster size,
+    `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed
+    on `availability` instances. Note that this value does not affect cluster size and cannot
+    currently be mutated over the lifetime of a cluster."""
+
     google_service_account: Optional[str] = None
     """If provided, the cluster will impersonate the google service account when accessing gcloud
     services (like GCS). The google service account must have previously been added to the

@@ -3441,6 +3450,8 @@ class GcpAttributes:
             body["availability"] = self.availability.value
         if self.boot_disk_size is not None:
             body["boot_disk_size"] = self.boot_disk_size
+        if self.first_on_demand is not None:
+            body["first_on_demand"] = self.first_on_demand
         if self.google_service_account is not None:
             body["google_service_account"] = self.google_service_account
         if self.local_ssd_count is not None:

@@ -3458,6 +3469,8 @@ class GcpAttributes:
             body["availability"] = self.availability
         if self.boot_disk_size is not None:
             body["boot_disk_size"] = self.boot_disk_size
+        if self.first_on_demand is not None:
+            body["first_on_demand"] = self.first_on_demand
         if self.google_service_account is not None:
             body["google_service_account"] = self.google_service_account
         if self.local_ssd_count is not None:

@@ -3474,6 +3487,7 @@ class GcpAttributes:
         return cls(
             availability=_enum(d, "availability", GcpAvailability),
             boot_disk_size=d.get("boot_disk_size", None),
+            first_on_demand=d.get("first_on_demand", None),
             google_service_account=d.get("google_service_account", None),
             local_ssd_count=d.get("local_ssd_count", None),
             use_preemptible_executors=d.get("use_preemptible_executors", None),
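A short sketch of how the new first_on_demand field composes with the existing availability setting; the sizing values are illustrative:

from databricks.sdk.service import compute

# Keep the driver plus one worker on on-demand instances; any remaining
# workers follow the `availability` setting.
gcp = compute.GcpAttributes(
    availability=compute.GcpAvailability.PREEMPTIBLE_WITH_FALLBACK_GCP,
    first_on_demand=2,
    boot_disk_size=100,
)

# The new field round-trips through the (de)serialization helpers shown above.
assert compute.GcpAttributes.from_dict(gcp.as_dict()).first_on_demand == 2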
databricks/sdk/service/database.py
CHANGED

@@ -589,6 +589,40 @@ class DeltaTableSyncInfo:
         )


+@dataclass
+class ListDatabaseCatalogsResponse:
+    database_catalogs: Optional[List[DatabaseCatalog]] = None
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of database catalogs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDatabaseCatalogsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_catalogs:
+            body["database_catalogs"] = [v.as_dict() for v in self.database_catalogs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDatabaseCatalogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_catalogs:
+            body["database_catalogs"] = self.database_catalogs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseCatalogsResponse:
+        """Deserializes the ListDatabaseCatalogsResponse from a dictionary."""
+        return cls(
+            database_catalogs=_repeated_dict(d, "database_catalogs", DatabaseCatalog),
+            next_page_token=d.get("next_page_token", None),
+        )
+
+
 @dataclass
 class ListDatabaseInstanceRolesResponse:
     database_instance_roles: Optional[List[DatabaseInstanceRole]] = None
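For reference, a sketch of the wire shape this response class models, using the from_dict helper shown above; the values are illustrative and DatabaseCatalog carries more fields than shown:

from databricks.sdk.service import database

page = database.ListDatabaseCatalogsResponse.from_dict(
    {
        "database_catalogs": [{"name": "my_catalog"}],  # placeholder catalog
        "next_page_token": "token-123",  # placeholder token
    }
)
print(page.database_catalogs[0].name, page.next_page_token)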
@@ -659,6 +693,40 @@ class ListDatabaseInstancesResponse:
         )


+@dataclass
+class ListSyncedDatabaseTablesResponse:
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of synced tables."""
+
+    synced_tables: Optional[List[SyncedDatabaseTable]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListSyncedDatabaseTablesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.synced_tables:
+            body["synced_tables"] = [v.as_dict() for v in self.synced_tables]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSyncedDatabaseTablesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.synced_tables:
+            body["synced_tables"] = self.synced_tables
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListSyncedDatabaseTablesResponse:
+        """Deserializes the ListSyncedDatabaseTablesResponse from a dictionary."""
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            synced_tables=_repeated_dict(d, "synced_tables", SyncedDatabaseTable),
+        )
+
+
 @dataclass
 class NewPipelineSpec:
     """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other
@@ -1733,10 +1801,47 @@ class DatabaseAPI:
         res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers)
         return SyncedDatabaseTable.from_dict(res)

+    def list_database_catalogs(
+        self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[DatabaseCatalog]:
+        """This API is currently unimplemented, but exposed for Terraform support.
+
+        :param instance_name: str
+          Name of the instance to get database catalogs for.
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of synced database tables. Requests first page if absent.
+
+        :returns: Iterator over :class:`DatabaseCatalog`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET", f"/api/2.0/database/instances/{instance_name}/catalogs", query=query, headers=headers
+            )
+            if "database_catalogs" in json:
+                for v in json["database_catalogs"]:
+                    yield DatabaseCatalog.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
     def list_database_instance_roles(
         self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[DatabaseInstanceRole]:
-        """START OF PG ROLE APIs Section
+        """START OF PG ROLE APIs Section These APIs are marked a PUBLIC with stage < PUBLIC_PREVIEW. With more
+        recent Lakebase V2 plans, we don't plan to ever advance these to PUBLIC_PREVIEW. These APIs will
+        remain effectively undocumented/UI-only and we'll aim for a new public roles API as part of V2 PuPr.

         :param instance_name: str
         :param page_size: int (optional)
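A usage sketch for the new listing; the generated iterator follows next_page_token across pages, so callers never handle ListDatabaseCatalogsResponse directly. The instance name is a placeholder, and note the docstring's caveat that the endpoint is not yet implemented server-side:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for catalog in w.database.list_database_catalogs(instance_name="my-instance", page_size=50):
    print(catalog.name)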
@@ -1798,6 +1903,67 @@ class DatabaseAPI:
                 return
             query["page_token"] = json["next_page_token"]

+    def list_synced_database_tables(
+        self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[SyncedDatabaseTable]:
+        """This API is currently unimplemented, but exposed for Terraform support.
+
+        :param instance_name: str
+          Name of the instance to get synced tables for.
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of synced database tables. Requests first page if absent.
+
+        :returns: Iterator over :class:`SyncedDatabaseTable`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET", f"/api/2.0/database/instances/{instance_name}/synced_tables", query=query, headers=headers
+            )
+            if "synced_tables" in json:
+                for v in json["synced_tables"]:
+                    yield SyncedDatabaseTable.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def update_database_catalog(
+        self, name: str, database_catalog: DatabaseCatalog, update_mask: str
+    ) -> DatabaseCatalog:
+        """This API is currently unimplemented, but exposed for Terraform support.
+
+        :param name: str
+          The name of the catalog in UC.
+        :param database_catalog: :class:`DatabaseCatalog`
+          Note that updating a database catalog is not yet supported.
+        :param update_mask: str
+          The list of fields to update. Setting this field is not yet supported.
+
+        :returns: :class:`DatabaseCatalog`
+        """
+        body = database_catalog.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/database/catalogs/{name}", query=query, body=body, headers=headers)
+        return DatabaseCatalog.from_dict(res)
+
     def update_database_instance(
         self, name: str, database_instance: DatabaseInstance, update_mask: str
     ) -> DatabaseInstance:
@@ -1807,7 +1973,8 @@ class DatabaseAPI:
           The name of the instance. This is the unique identifier for the instance.
         :param database_instance: :class:`DatabaseInstance`
         :param update_mask: str
-          The list of fields to update.
+          The list of fields to update. If unspecified, all fields will be updated when possible. To wipe out
+          custom_tags, specify custom_tags in the update_mask with an empty custom_tags map.

         :returns: :class:`DatabaseInstance`
         """
@@ -1822,3 +1989,29 @@ class DatabaseAPI:

         res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers)
         return DatabaseInstance.from_dict(res)
+
+    def update_synced_database_table(
+        self, name: str, synced_table: SyncedDatabaseTable, update_mask: str
+    ) -> SyncedDatabaseTable:
+        """This API is currently unimplemented, but exposed for Terraform support.
+
+        :param name: str
+          Full three-part (catalog, schema, table) name of the table.
+        :param synced_table: :class:`SyncedDatabaseTable`
+          Note that updating a synced database table is not yet supported.
+        :param update_mask: str
+          The list of fields to update. Setting this field is not yet supported.
+
+        :returns: :class:`SyncedDatabaseTable`
+        """
+        body = synced_table.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/database/synced_tables/{name}", query=query, body=body, headers=headers)
+        return SyncedDatabaseTable.from_dict(res)
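Both new update calls mirror the existing update_database_instance pattern: a PATCH with an update_mask query parameter. A sketch with placeholder names; per the docstrings above, the endpoints are unimplemented and exposed for Terraform support, so this would not succeed against the service yet:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import database

w = WorkspaceClient()

updated = w.database.update_synced_database_table(
    name="main.default.my_synced_table",  # placeholder three-part name
    synced_table=database.SyncedDatabaseTable(name="main.default.my_synced_table"),
    update_mask="",  # setting specific fields is not yet supported, per the docstring
)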
databricks/sdk/service/jobs.py
CHANGED
@@ -3037,6 +3037,11 @@ class JobSettings:
     the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API
     request to `runNow`."""

+    usage_policy_id: Optional[str] = None
+    """The id of the user specified usage policy to use for this job. If not specified, a default usage
+    policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for
+    the budget policy used by this workload."""
+
     webhook_notifications: Optional[WebhookNotifications] = None
     """A collection of system notification IDs to notify when runs of this job begin or complete."""

@@ -3089,6 +3094,8 @@ class JobSettings:
             body["timeout_seconds"] = self.timeout_seconds
         if self.trigger:
             body["trigger"] = self.trigger.as_dict()
+        if self.usage_policy_id is not None:
+            body["usage_policy_id"] = self.usage_policy_id
         if self.webhook_notifications:
             body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body

@@ -3142,6 +3149,8 @@ class JobSettings:
             body["timeout_seconds"] = self.timeout_seconds
         if self.trigger:
             body["trigger"] = self.trigger
+        if self.usage_policy_id is not None:
+            body["usage_policy_id"] = self.usage_policy_id
         if self.webhook_notifications:
             body["webhook_notifications"] = self.webhook_notifications
         return body

@@ -3173,6 +3182,7 @@ class JobSettings:
             tasks=_repeated_dict(d, "tasks", Task),
             timeout_seconds=d.get("timeout_seconds", None),
             trigger=_from_dict(d, "trigger", TriggerSettings),
+            usage_policy_id=d.get("usage_policy_id", None),
             webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
         )
@@ -7884,6 +7894,8 @@ class TerminationCodeCode(Enum):
     run failed due to a cloud provider issue. Refer to the state message for further details. *
     `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
     limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user.
+    * `BREAKING_CHANGE`: Run failed because of an intentional breaking change in Spark, but it will
+    be retried with a mitigation config.

     [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
@@ -8437,6 +8449,7 @@ class JobsAPI:
         tasks: Optional[List[Task]] = None,
         timeout_seconds: Optional[int] = None,
         trigger: Optional[TriggerSettings] = None,
+        usage_policy_id: Optional[str] = None,
         webhook_notifications: Optional[WebhookNotifications] = None,
     ) -> CreateResponse:
         """Create a new job.

@@ -8531,6 +8544,10 @@ class JobsAPI:
           A configuration to trigger a run when certain conditions are met. The default behavior is that the
           job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
           `runNow`.
+        :param usage_policy_id: str (optional)
+          The id of the user specified usage policy to use for this job. If not specified, a default usage
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param webhook_notifications: :class:`WebhookNotifications` (optional)
           A collection of system notification IDs to notify when runs of this job begin or complete.

@@ -8585,6 +8602,8 @@ class JobsAPI:
             body["timeout_seconds"] = timeout_seconds
         if trigger is not None:
             body["trigger"] = trigger.as_dict()
+        if usage_policy_id is not None:
+            body["usage_policy_id"] = usage_policy_id
         if webhook_notifications is not None:
             body["webhook_notifications"] = webhook_notifications.as_dict()
         headers = {

@@ -9381,6 +9400,7 @@ class JobsAPI:
         run_name: Optional[str] = None,
         tasks: Optional[List[SubmitTask]] = None,
         timeout_seconds: Optional[int] = None,
+        usage_policy_id: Optional[str] = None,
         webhook_notifications: Optional[WebhookNotifications] = None,
     ) -> Wait[Run]:
         """Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.

@@ -9432,6 +9452,9 @@ class JobsAPI:
         :param tasks: List[:class:`SubmitTask`] (optional)
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
+        :param usage_policy_id: str (optional)
+          The user specified id of the usage policy to use for this one-time run. If not specified, a default
+          usage policy may be applied when creating or modifying the job.
         :param webhook_notifications: :class:`WebhookNotifications` (optional)
           A collection of system notification IDs to notify when the run begins or completes.

@@ -9466,6 +9489,8 @@ class JobsAPI:
             body["tasks"] = [v.as_dict() for v in tasks]
         if timeout_seconds is not None:
             body["timeout_seconds"] = timeout_seconds
+        if usage_policy_id is not None:
+            body["usage_policy_id"] = usage_policy_id
         if webhook_notifications is not None:
             body["webhook_notifications"] = webhook_notifications.as_dict()
         headers = {

@@ -9496,6 +9521,7 @@ class JobsAPI:
         run_name: Optional[str] = None,
         tasks: Optional[List[SubmitTask]] = None,
         timeout_seconds: Optional[int] = None,
+        usage_policy_id: Optional[str] = None,
         webhook_notifications: Optional[WebhookNotifications] = None,
         timeout=timedelta(minutes=20),
     ) -> Run:

@@ -9513,6 +9539,7 @@ class JobsAPI:
             run_name=run_name,
             tasks=tasks,
             timeout_seconds=timeout_seconds,
+            usage_policy_id=usage_policy_id,
             webhook_notifications=webhook_notifications,
         ).result(timeout=timeout)
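End to end, the new parameter threads from JobsAPI.create and JobsAPI.submit into the request body. A create sketch; the notebook path and policy id are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

created = w.jobs.create(
    name="nightly-etl",
    tasks=[
        jobs.Task(
            task_key="main",
            notebook_task=jobs.NotebookTask(notebook_path="/Workspace/etl/main"),
        )
    ],
    usage_policy_id="<usage-policy-id>",  # omit to let a default policy apply
)
print(created.job_id)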
databricks/sdk/service/ml.py
CHANGED
@@ -3494,10 +3494,8 @@ class PublishSpec:
     online_table_name: str
     """The full three-part (catalog, schema, table) name of the online table."""

-    publish_mode: Optional[PublishSpecPublishMode] = None
-    """The publish mode of the pipeline that syncs the online table with the source table.
-    TRIGGERED if not specified. All publish modes require the source table to have Change Data Feed
-    (CDF) enabled."""
+    publish_mode: PublishSpecPublishMode
+    """The publish mode of the pipeline that syncs the online table with the source table."""

     def as_dict(self) -> dict:
         """Serializes the PublishSpec into a dictionary suitable for use as a JSON request body."""

@@ -3534,6 +3532,7 @@ class PublishSpec:
 class PublishSpecPublishMode(Enum):

     CONTINUOUS = "CONTINUOUS"
+    SNAPSHOT = "SNAPSHOT"
     TRIGGERED = "TRIGGERED"
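A construction sketch for the new mode, based only on the fields visible in these hunks; PublishSpec may have additional fields not shown here:

from databricks.sdk.service import ml

# publish_mode is now required; SNAPSHOT is the newly added member.
spec = ml.PublishSpec(
    online_table_name="main.default.my_online_table",  # placeholder
    publish_mode=ml.PublishSpecPublishMode.SNAPSHOT,
)
print(spec.as_dict())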
databricks/sdk/service/serving.py
CHANGED

@@ -4474,6 +4474,7 @@ class ServingEndpointsAPI:
         self,
         name: str,
         *,
+        client_request_id: Optional[str] = None,
         dataframe_records: Optional[List[Any]] = None,
         dataframe_split: Optional[DataframeSplitInput] = None,
         extra_params: Optional[Dict[str, str]] = None,

@@ -4487,11 +4488,15 @@ class ServingEndpointsAPI:
         stop: Optional[List[str]] = None,
         stream: Optional[bool] = None,
         temperature: Optional[float] = None,
+        usage_context: Optional[Dict[str, str]] = None,
     ) -> QueryEndpointResponse:
         """Query a serving endpoint

         :param name: str
           The name of the serving endpoint. This field is required and is provided via the path parameter.
+        :param client_request_id: str (optional)
+          Optional user-provided request identifier that will be recorded in the inference table and the usage
+          tracking table.
         :param dataframe_records: List[Any] (optional)
           Pandas Dataframe input in the records orientation.
         :param dataframe_split: :class:`DataframeSplitInput` (optional)

@@ -4533,10 +4538,14 @@ class ServingEndpointsAPI:
           The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
           endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
           other chat/completions query fields.
+        :param usage_context: Dict[str,str] (optional)
+          Optional user-provided context that will be recorded in the usage tracking table.

         :returns: :class:`QueryEndpointResponse`
         """
         body = {}
+        if client_request_id is not None:
+            body["client_request_id"] = client_request_id
         if dataframe_records is not None:
             body["dataframe_records"] = [v for v in dataframe_records]
         if dataframe_split is not None:

@@ -4563,6 +4572,8 @@ class ServingEndpointsAPI:
             body["stream"] = stream
         if temperature is not None:
             body["temperature"] = temperature
+        if usage_context is not None:
+            body["usage_context"] = usage_context
         headers = {
             "Accept": "application/json",
             "Content-Type": "application/json",

@@ -4777,6 +4788,7 @@ class ServingEndpointsDataPlaneAPI:
         self,
         name: str,
         *,
+        client_request_id: Optional[str] = None,
         dataframe_records: Optional[List[Any]] = None,
         dataframe_split: Optional[DataframeSplitInput] = None,
         extra_params: Optional[Dict[str, str]] = None,

@@ -4790,11 +4802,15 @@ class ServingEndpointsDataPlaneAPI:
         stop: Optional[List[str]] = None,
         stream: Optional[bool] = None,
         temperature: Optional[float] = None,
+        usage_context: Optional[Dict[str, str]] = None,
     ) -> QueryEndpointResponse:
         """Query a serving endpoint

         :param name: str
           The name of the serving endpoint. This field is required and is provided via the path parameter.
+        :param client_request_id: str (optional)
+          Optional user-provided request identifier that will be recorded in the inference table and the usage
+          tracking table.
         :param dataframe_records: List[Any] (optional)
           Pandas Dataframe input in the records orientation.
         :param dataframe_split: :class:`DataframeSplitInput` (optional)

@@ -4836,10 +4852,14 @@ class ServingEndpointsDataPlaneAPI:
           The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
           endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
           other chat/completions query fields.
+        :param usage_context: Dict[str,str] (optional)
+          Optional user-provided context that will be recorded in the usage tracking table.

         :returns: :class:`QueryEndpointResponse`
         """
         body = {}
+        if client_request_id is not None:
+            body["client_request_id"] = client_request_id
         if dataframe_records is not None:
             body["dataframe_records"] = [v for v in dataframe_records]
         if dataframe_split is not None:

@@ -4866,6 +4886,8 @@ class ServingEndpointsDataPlaneAPI:
             body["stream"] = stream
         if temperature is not None:
             body["temperature"] = temperature
+        if usage_context is not None:
+            body["usage_context"] = usage_context
         data_plane_info = self._data_plane_info_query(
             name=name,
         )
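A query sketch tying the two new parameters together; the endpoint name and context values are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import serving

w = WorkspaceClient()

response = w.serving_endpoints.query(
    name="my-chat-endpoint",  # placeholder
    messages=[serving.ChatMessage(role=serving.ChatMessageRole.USER, content="Hello!")],
    client_request_id="req-0001",  # recorded in the inference and usage tracking tables
    usage_context={"team": "analytics"},  # free-form key/value context
)
print(response.choices[0].message.content)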