databricks-sdk 0.19.1__py3-none-any.whl → 0.21.0__py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +28 -6
- databricks/sdk/_widgets/__init__.py +2 -2
- databricks/sdk/config.py +3 -2
- databricks/sdk/core.py +4 -2
- databricks/sdk/mixins/workspace.py +2 -1
- databricks/sdk/oauth.py +1 -1
- databricks/sdk/runtime/__init__.py +85 -11
- databricks/sdk/runtime/dbutils_stub.py +1 -1
- databricks/sdk/service/_internal.py +1 -1
- databricks/sdk/service/billing.py +64 -1
- databricks/sdk/service/catalog.py +796 -84
- databricks/sdk/service/compute.py +391 -13
- databricks/sdk/service/dashboards.py +15 -0
- databricks/sdk/service/files.py +289 -15
- databricks/sdk/service/iam.py +214 -0
- databricks/sdk/service/jobs.py +242 -143
- databricks/sdk/service/ml.py +407 -0
- databricks/sdk/service/oauth2.py +83 -0
- databricks/sdk/service/pipelines.py +78 -8
- databricks/sdk/service/provisioning.py +108 -36
- databricks/sdk/service/serving.py +101 -35
- databricks/sdk/service/settings.py +1316 -186
- databricks/sdk/service/sharing.py +94 -18
- databricks/sdk/service/sql.py +230 -13
- databricks/sdk/service/vectorsearch.py +105 -60
- databricks/sdk/service/workspace.py +175 -1
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/METADATA +3 -1
- databricks_sdk-0.21.0.dist-info/RECORD +53 -0
- databricks/sdk/runtime/stub.py +0 -48
- databricks_sdk-0.19.1.dist-info/RECORD +0 -54
- {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.19.1.dist-info → databricks_sdk-0.21.0.dist-info}/top_level.txt +0 -0
@@ -5,7 +5,7 @@ from __future__ import annotations
 import logging
 from dataclasses import dataclass
 from enum import Enum
-from typing import
+from typing import Dict, Iterator, List, Optional
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
@@ -260,6 +260,20 @@ class ArtifactType(Enum):
     LIBRARY_MAVEN = 'LIBRARY_MAVEN'
 
 
+@dataclass
+class AssignResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the AssignResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
+        """Deserializes the AssignResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class AwsIamRole:
     role_arn: str
@@ -347,6 +361,20 @@ class AzureServicePrincipal:
                    directory_id=d.get('directory_id', None))
 
 
+@dataclass
+class CancelRefreshResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the CancelRefreshResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CancelRefreshResponse:
+        """Deserializes the CancelRefreshResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class CatalogInfo:
     browse_only: Optional[bool] = None
@@ -794,6 +822,41 @@ class ConnectionType(Enum):
     SQLSERVER = 'SQLSERVER'
 
 
+@dataclass
+class ContinuousUpdateStatus:
+    """Detailed status of an online table. Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE
+    or the ONLINE_UPDATING_PIPELINE_RESOURCES state."""
+
+    initial_pipeline_sync_progress: Optional[PipelineProgress] = None
+    """Progress of the initial data synchronization."""
+
+    last_processed_commit_version: Optional[int] = None
+    """The last source table Delta version that was synced to the online table. Note that this Delta
+    version may not be completely synced to the online table yet."""
+
+    timestamp: Optional[str] = None
+    """The timestamp of the last time any data was synchronized from the source table to the online
+    table."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.initial_pipeline_sync_progress:
+            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ContinuousUpdateStatus:
+        """Deserializes the ContinuousUpdateStatus from a dictionary."""
+        return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress',
+                       PipelineProgress),
+                   last_processed_commit_version=d.get('last_processed_commit_version', None),
+                   timestamp=d.get('timestamp', None))
+
+
 @dataclass
 class CreateCatalog:
     name: str
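The new status dataclasses above serialize to and from plain dictionaries through as_dict() and from_dict(). A minimal round-trip sketch, assuming these classes are importable from databricks.sdk.service.catalog as this diff suggests; the payload values are invented for illustration:

    from databricks.sdk.service.catalog import ContinuousUpdateStatus

    # Hypothetical payload, e.g. parsed from an API response body.
    payload = {
        'initial_pipeline_sync_progress': {'sync_progress_completion': 0.42, 'synced_row_count': 42},
        'last_processed_commit_version': 17,
        'timestamp': '2024-02-01T00:00:00Z',
    }
    status = ContinuousUpdateStatus.from_dict(payload)
    assert status.last_processed_commit_version == 17
    # as_dict() turns nested dataclasses back into JSON-ready dictionaries.
    round_tripped = ContinuousUpdateStatus.from_dict(status.as_dict())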
@@ -1203,7 +1266,7 @@ class CreateMonitor:
     expression independently, resulting in a separate slice for each predicate and its complements.
     For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
 
-    snapshot: Optional[
+    snapshot: Optional[MonitorSnapshotProfileType] = None
     """Configuration for monitoring snapshot tables."""
 
     time_series: Optional[MonitorTimeSeriesProfileType] = None
@@ -1229,7 +1292,7 @@ class CreateMonitor:
         if self.skip_builtin_dashboard is not None:
             body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
         if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
@@ -1249,7 +1312,7 @@ class CreateMonitor:
                    schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
                    skip_builtin_dashboard=d.get('skip_builtin_dashboard', None),
                    slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=d
+                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshotProfileType),
                    time_series=_from_dict(d, 'time_series', MonitorTimeSeriesProfileType),
                    warehouse_id=d.get('warehouse_id', None))
 
@@ -1291,6 +1354,20 @@ class CreateRegisteredModelRequest:
                    storage_location=d.get('storage_location', None))
 
 
+@dataclass
+class CreateResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
+        """Deserializes the CreateResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class CreateSchema:
     name: str
@@ -1348,7 +1425,7 @@ class CreateStorageCredential:
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
-    databricks_gcp_service_account: Optional[
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
    """The <Databricks> managed GCP service account configuration."""
 
     read_only: Optional[bool] = None
@@ -1367,7 +1444,7 @@ class CreateStorageCredential:
         if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
         if self.comment is not None: body['comment'] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.read_only is not None: body['read_only'] = self.read_only
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
@@ -1381,7 +1458,8 @@ class CreateStorageCredential:
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
                    comment=d.get('comment', None),
-                   databricks_gcp_service_account=d
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                       DatabricksGcpServiceAccountRequest),
                    name=d.get('name', None),
                    read_only=d.get('read_only', None),
                    skip_validation=d.get('skip_validation', None))
@@ -1490,6 +1568,20 @@ class DataSourceFormat(Enum):
     UNITY_CATALOG = 'UNITY_CATALOG'
 
 
+@dataclass
+class DatabricksGcpServiceAccountRequest:
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest:
+        """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DatabricksGcpServiceAccountResponse:
     credential_id: Optional[str] = None
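With this change the databricks_gcp_service_account field on the storage-credential request classes is a typed (if empty) object that is serialized through as_dict() rather than passed through raw. A minimal sketch, assuming the classes are exposed from databricks.sdk.service.catalog; the credential values are invented for illustration:

    from databricks.sdk.service.catalog import (CreateStorageCredential,
                                                DatabricksGcpServiceAccountRequest)

    # Hypothetical request: ask Databricks to manage a GCP service account for the credential.
    req = CreateStorageCredential(name='my_gcp_credential',
                                  comment='managed service account',
                                  databricks_gcp_service_account=DatabricksGcpServiceAccountRequest(),
                                  read_only=True)
    body = req.as_dict()
    # The field now serializes as a nested object ({}), not as a raw value.
    assert body['databricks_gcp_service_account'] == {}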
@@ -1511,6 +1603,34 @@ class DatabricksGcpServiceAccountResponse:
         return cls(credential_id=d.get('credential_id', None), email=d.get('email', None))
 
 
+@dataclass
+class DeleteAliasResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAliasResponse:
+        """Deserializes the DeleteAliasResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+        """Deserializes the DeleteResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeltaRuntimePropertiesKvPairs:
     """Properties pertaining to the current state of the delta table as given by the commit server.
@@ -1575,6 +1695,20 @@ class DependencyList:
         return cls(dependencies=_repeated_dict(d, 'dependencies', Dependency))
 
 
+@dataclass
+class DisableResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableResponse:
+        """Deserializes the DisableResponse from a dictionary."""
+        return cls()
+
+
 class DisableSchemaName(Enum):
 
     ACCESS = 'access'
@@ -1699,6 +1833,20 @@ class EnablePredictiveOptimization(Enum):
     INHERIT = 'INHERIT'
 
 
+@dataclass
+class EnableResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnableResponse:
+        """Deserializes the EnableResponse from a dictionary."""
+        return cls()
+
+
 class EnableSchemaName(Enum):
 
     ACCESS = 'access'
@@ -1808,6 +1956,35 @@ class ExternalLocationInfo:
                    url=d.get('url', None))
 
 
+@dataclass
+class FailedStatus:
+    """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the
+    ONLINE_PIPELINE_FAILED state."""
+
+    last_processed_commit_version: Optional[int] = None
+    """The last source table Delta version that was synced to the online table. Note that this Delta
+    version may only be partially synced to the online table. Only populated if the table is still
+    online and available for serving."""
+
+    timestamp: Optional[str] = None
+    """The timestamp of the last time any data was synchronized from the source table to the online
+    table. Only populated if the table is still online and available for serving."""
+
+    def as_dict(self) -> dict:
+        """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> FailedStatus:
+        """Deserializes the FailedStatus from a dictionary."""
+        return cls(last_processed_commit_version=d.get('last_processed_commit_version', None),
+                   timestamp=d.get('timestamp', None))
+
+
 @dataclass
 class ForeignKeyConstraint:
     name: str
@@ -2548,18 +2725,25 @@ class ListTablesResponse:
 
 @dataclass
 class ListVolumesResponseContent:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request to retrieve the next page of
+    results."""
+
     volumes: Optional[List[VolumeInfo]] = None
 
     def as_dict(self) -> dict:
         """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVolumesResponseContent:
         """Deserializes the ListVolumesResponseContent from a dictionary."""
-        return cls(
+        return cls(next_page_token=d.get('next_page_token', None),
+                   volumes=_repeated_dict(d, 'volumes', VolumeInfo))
 
 
 class MatchType(Enum):
@@ -3047,7 +3231,7 @@ class MonitorInfo:
     expression independently, resulting in a separate slice for each predicate and its complements.
     For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
 
-    snapshot: Optional[
+    snapshot: Optional[MonitorSnapshotProfileType] = None
     """Configuration for monitoring snapshot tables."""
 
     status: Optional[MonitorInfoStatus] = None
@@ -3080,7 +3264,7 @@ class MonitorInfo:
             body['profile_metrics_table_name'] = self.profile_metrics_table_name
         if self.schedule: body['schedule'] = self.schedule.as_dict()
         if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
         if self.status is not None: body['status'] = self.status.value
         if self.table_name is not None: body['table_name'] = self.table_name
         if self.time_series: body['time_series'] = self.time_series.as_dict()
@@ -3104,7 +3288,7 @@ class MonitorInfo:
                    profile_metrics_table_name=d.get('profile_metrics_table_name', None),
                    schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
                    slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=d
+                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshotProfileType),
                    status=_enum(d, 'status', MonitorInfoStatus),
                    table_name=d.get('table_name', None),
                    time_series=_from_dict(d, 'time_series', MonitorTimeSeriesProfileType))
@@ -3184,6 +3368,20 @@ class MonitorRefreshInfoState(Enum):
     SUCCESS = 'SUCCESS'
 
 
+@dataclass
+class MonitorSnapshotProfileType:
+
+    def as_dict(self) -> dict:
+        """Serializes the MonitorSnapshotProfileType into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MonitorSnapshotProfileType:
+        """Deserializes the MonitorSnapshotProfileType from a dictionary."""
+        return cls()
+
+
 @dataclass
 class MonitorTimeSeriesProfileType:
     granularities: Optional[List[str]] = None
@@ -3223,6 +3421,187 @@ class NamedTableConstraint:
         return cls(name=d.get('name', None))
 
 
+@dataclass
+class OnlineTable:
+    """Online Table information."""
+
+    name: Optional[str] = None
+    """Full three-part (catalog, schema, table) name of the table."""
+
+    spec: Optional[OnlineTableSpec] = None
+    """Specification of the online table."""
+
+    status: Optional[OnlineTableStatus] = None
+    """Online Table status"""
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.spec: body['spec'] = self.spec.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTable:
+        """Deserializes the OnlineTable from a dictionary."""
+        return cls(name=d.get('name', None),
+                   spec=_from_dict(d, 'spec', OnlineTableSpec),
+                   status=_from_dict(d, 'status', OnlineTableStatus))
+
+
+@dataclass
+class OnlineTableSpec:
+    """Specification of an online table."""
+
+    perform_full_copy: Optional[bool] = None
+    """Whether to create a full-copy pipeline -- a pipeline that stops after creates a full copy of the
+    source table upon initialization and does not process any change data feeds (CDFs) afterwards.
+    The pipeline can still be manually triggered afterwards, but it always perform a full copy of
+    the source table and there are no incremental updates. This mode is useful for syncing views or
+    tables without CDFs to online tables. Note that the full-copy pipeline only supports "triggered"
+    scheduling policy."""
+
+    pipeline_id: Optional[str] = None
+    """ID of the associated pipeline. Generated by the server - cannot be set by the caller."""
+
+    primary_key_columns: Optional[List[str]] = None
+    """Primary Key columns to be used for data insert/update in the destination."""
+
+    run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None
+    """Pipeline runs continuously after generating the initial data."""
+
+    run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None
+    """Pipeline stops after generating the initial data and can be triggered later (manually, through a
+    cron job or through data triggers)"""
+
+    source_table_full_name: Optional[str] = None
+    """Three-part (catalog, schema, table) name of the source Delta table."""
+
+    timeseries_key: Optional[str] = None
+    """Time series key to deduplicate (tie-break) rows with the same primary key."""
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns]
+        if self.run_continuously: body['run_continuously'] = self.run_continuously.as_dict()
+        if self.run_triggered: body['run_triggered'] = self.run_triggered.as_dict()
+        if self.source_table_full_name is not None:
+            body['source_table_full_name'] = self.source_table_full_name
+        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpec:
+        """Deserializes the OnlineTableSpec from a dictionary."""
+        return cls(perform_full_copy=d.get('perform_full_copy', None),
+                   pipeline_id=d.get('pipeline_id', None),
+                   primary_key_columns=d.get('primary_key_columns', None),
+                   run_continuously=_from_dict(d, 'run_continuously',
+                       OnlineTableSpecContinuousSchedulingPolicy),
+                   run_triggered=_from_dict(d, 'run_triggered', OnlineTableSpecTriggeredSchedulingPolicy),
+                   source_table_full_name=d.get('source_table_full_name', None),
+                   timeseries_key=d.get('timeseries_key', None))
+
+
+@dataclass
+class OnlineTableSpecContinuousSchedulingPolicy:
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecContinuousSchedulingPolicy:
+        """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary."""
+        return cls()
+
+
+@dataclass
+class OnlineTableSpecTriggeredSchedulingPolicy:
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecTriggeredSchedulingPolicy:
+        """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary."""
+        return cls()
+
+
+class OnlineTableState(Enum):
+    """The state of an online table."""
+
+    OFFLINE = 'OFFLINE'
+    OFFLINE_FAILED = 'OFFLINE_FAILED'
+    ONLINE = 'ONLINE'
+    ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE'
+    ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE'
+    ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED'
+    ONLINE_TABLE_STATE_UNSPECIFIED = 'ONLINE_TABLE_STATE_UNSPECIFIED'
+    ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE'
+    ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES'
+    PROVISIONING = 'PROVISIONING'
+    PROVISIONING_INITIAL_SNAPSHOT = 'PROVISIONING_INITIAL_SNAPSHOT'
+    PROVISIONING_PIPELINE_RESOURCES = 'PROVISIONING_PIPELINE_RESOURCES'
+
+
+@dataclass
+class OnlineTableStatus:
+    """Status of an online table."""
+
+    continuous_update_status: Optional[ContinuousUpdateStatus] = None
+    """Detailed status of an online table. Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE
+    or the ONLINE_UPDATING_PIPELINE_RESOURCES state."""
+
+    detailed_state: Optional[OnlineTableState] = None
+    """The state of the online table."""
+
+    failed_status: Optional[FailedStatus] = None
+    """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the
+    ONLINE_PIPELINE_FAILED state."""
+
+    message: Optional[str] = None
+    """A text description of the current state of the online table."""
+
+    provisioning_status: Optional[ProvisioningStatus] = None
+    """Detailed status of an online table. Shown if the online table is in the
+    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
+
+    triggered_update_status: Optional[TriggeredUpdateStatus] = None
+    """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE
+    or the ONLINE_NO_PENDING_UPDATE state."""
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.continuous_update_status:
+            body['continuous_update_status'] = self.continuous_update_status.as_dict()
+        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state.value
+        if self.failed_status: body['failed_status'] = self.failed_status.as_dict()
+        if self.message is not None: body['message'] = self.message
+        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status.as_dict()
+        if self.triggered_update_status:
+            body['triggered_update_status'] = self.triggered_update_status.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTableStatus:
+        """Deserializes the OnlineTableStatus from a dictionary."""
+        return cls(continuous_update_status=_from_dict(d, 'continuous_update_status', ContinuousUpdateStatus),
+                   detailed_state=_enum(d, 'detailed_state', OnlineTableState),
+                   failed_status=_from_dict(d, 'failed_status', FailedStatus),
+                   message=d.get('message', None),
+                   provisioning_status=_from_dict(d, 'provisioning_status', ProvisioningStatus),
+                   triggered_update_status=_from_dict(d, 'triggered_update_status', TriggeredUpdateStatus))
+
+
 @dataclass
 class PermissionsChange:
     add: Optional[List[Privilege]] = None
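A minimal sketch of building an online-table specification with the dataclasses added above and round-tripping it through as_dict()/from_dict(); the import path is assumed to be databricks.sdk.service.catalog (where this diff adds the classes) and the table names are invented:

    from databricks.sdk.service.catalog import (OnlineTable, OnlineTableSpec,
                                                OnlineTableSpecTriggeredSchedulingPolicy)

    # Hypothetical spec for an online table backed by a source Delta table.
    spec = OnlineTableSpec(primary_key_columns=['id'],
                           source_table_full_name='main.default.transactions',
                           run_triggered=OnlineTableSpecTriggeredSchedulingPolicy())
    table = OnlineTable(name='main.default.transactions_online', spec=spec)
    # as_dict() produces the JSON request body; from_dict() reverses it.
    assert OnlineTable.from_dict(table.as_dict()).spec.primary_key_columns == ['id']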
@@ -3268,6 +3647,49 @@ class PermissionsList:
         return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment))
 
 
+@dataclass
+class PipelineProgress:
+    """Progress information of the Online Table data synchronization pipeline."""
+
+    estimated_completion_time_seconds: Optional[float] = None
+    """The estimated time remaining to complete this update in seconds."""
+
+    latest_version_currently_processing: Optional[int] = None
+    """The source table Delta version that was last processed by the pipeline. The pipeline may not
+    have completely processed this version yet."""
+
+    sync_progress_completion: Optional[float] = None
+    """The completion ratio of this update. This is a number between 0 and 1."""
+
+    synced_row_count: Optional[int] = None
+    """The number of rows that have been synced in this update."""
+
+    total_row_count: Optional[int] = None
+    """The total number of rows that need to be synced in this update. This number may be an estimate."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.estimated_completion_time_seconds is not None:
+            body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+        if self.latest_version_currently_processing is not None:
+            body['latest_version_currently_processing'] = self.latest_version_currently_processing
+        if self.sync_progress_completion is not None:
+            body['sync_progress_completion'] = self.sync_progress_completion
+        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PipelineProgress:
+        """Deserializes the PipelineProgress from a dictionary."""
+        return cls(estimated_completion_time_seconds=d.get('estimated_completion_time_seconds', None),
+                   latest_version_currently_processing=d.get('latest_version_currently_processing', None),
+                   sync_progress_completion=d.get('sync_progress_completion', None),
+                   synced_row_count=d.get('synced_row_count', None),
+                   total_row_count=d.get('total_row_count', None))
+
+
 @dataclass
 class PrimaryKeyConstraint:
     name: str
@@ -3385,6 +3807,29 @@ class ProvisioningInfoState(Enum):
     STATE_UNSPECIFIED = 'STATE_UNSPECIFIED'
 
 
+@dataclass
+class ProvisioningStatus:
+    """Detailed status of an online table. Shown if the online table is in the
+    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
+
+    initial_pipeline_sync_progress: Optional[PipelineProgress] = None
+    """Details about initial data synchronization. Only populated when in the
+    PROVISIONING_INITIAL_SNAPSHOT state."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.initial_pipeline_sync_progress:
+            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus:
+        """Deserializes the ProvisioningStatus from a dictionary."""
+        return cls(
+            initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress))
+
+
 @dataclass
 class RegisteredModelAlias:
     """Registered model alias."""
@@ -4109,6 +4554,68 @@ class TableType(Enum):
     VIEW = 'VIEW'
 
 
+@dataclass
+class TriggeredUpdateStatus:
+    """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE
+    or the ONLINE_NO_PENDING_UPDATE state."""
+
+    last_processed_commit_version: Optional[int] = None
+    """The last source table Delta version that was synced to the online table. Note that this Delta
+    version may not be completely synced to the online table yet."""
+
+    timestamp: Optional[str] = None
+    """The timestamp of the last time any data was synchronized from the source table to the online
+    table."""
+
+    triggered_update_progress: Optional[PipelineProgress] = None
+    """Progress of the active data synchronization pipeline."""
+
+    def as_dict(self) -> dict:
+        """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.triggered_update_progress:
+            body['triggered_update_progress'] = self.triggered_update_progress.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TriggeredUpdateStatus:
+        """Deserializes the TriggeredUpdateStatus from a dictionary."""
+        return cls(last_processed_commit_version=d.get('last_processed_commit_version', None),
+                   timestamp=d.get('timestamp', None),
+                   triggered_update_progress=_from_dict(d, 'triggered_update_progress', PipelineProgress))
+
+
+@dataclass
+class UnassignResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UnassignResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UnassignResponse:
+        """Deserializes the UnassignResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class UpdateAssignmentResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
+        """Deserializes the UpdateAssignmentResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpdateCatalog:
     comment: Optional[str] = None
@@ -4163,7 +4670,7 @@ class UpdateConnection:
     options: Dict[str, str]
     """A map of key-value properties attached to the securable."""
 
-
+    name: Optional[str] = None
     """Name of the connection."""
 
     new_name: Optional[str] = None
@@ -4175,7 +4682,7 @@ class UpdateConnection:
     def as_dict(self) -> dict:
         """Serializes the UpdateConnection into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.
+        if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.options: body['options'] = self.options
         if self.owner is not None: body['owner'] = self.owner
@@ -4184,7 +4691,7 @@ class UpdateConnection:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateConnection:
         """Deserializes the UpdateConnection from a dictionary."""
-        return cls(
+        return cls(name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    options=d.get('options', None),
                    owner=d.get('owner', None))
@@ -4401,9 +4908,6 @@ class UpdateModelVersionRequest:
 
 @dataclass
 class UpdateMonitor:
-    assets_dir: str
-    """The directory to store monitoring assets (e.g. dashboard, metric tables)."""
-
     output_schema_name: str
     """Schema where output metric tables are created."""
 
@@ -4436,7 +4940,7 @@ class UpdateMonitor:
     expression independently, resulting in a separate slice for each predicate and its complements.
     For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
 
-    snapshot: Optional[
+    snapshot: Optional[MonitorSnapshotProfileType] = None
    """Configuration for monitoring snapshot tables."""
 
     time_series: Optional[MonitorTimeSeriesProfileType] = None
@@ -4445,7 +4949,6 @@ class UpdateMonitor:
     def as_dict(self) -> dict:
         """Serializes the UpdateMonitor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
         if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
         if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
         if self.data_classification_config:
@@ -4456,15 +4959,14 @@ class UpdateMonitor:
         if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
         if self.schedule: body['schedule'] = self.schedule.as_dict()
         if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMonitor:
         """Deserializes the UpdateMonitor from a dictionary."""
-        return cls(
-            baseline_table_name=d.get('baseline_table_name', None),
+        return cls(baseline_table_name=d.get('baseline_table_name', None),
                    custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorCustomMetric),
                    data_classification_config=_from_dict(d, 'data_classification_config',
                        MonitorDataClassificationConfig),
@@ -4474,7 +4976,7 @@ class UpdateMonitor:
                    output_schema_name=d.get('output_schema_name', None),
                    schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
                    slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=d
+                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshotProfileType),
                    time_series=_from_dict(d, 'time_series', MonitorTimeSeriesProfileType))
 
 
@@ -4537,6 +5039,20 @@ class UpdateRegisteredModelRequest:
                    owner=d.get('owner', None))
 
 
+@dataclass
+class UpdateResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
+        """Deserializes the UpdateResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpdateSchema:
     comment: Optional[str] = None
@@ -4598,7 +5114,7 @@ class UpdateStorageCredential:
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
-    databricks_gcp_service_account: Optional[
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
    """The <Databricks> managed GCP service account configuration."""
 
     force: Optional[bool] = None
@@ -4629,7 +5145,7 @@ class UpdateStorageCredential:
         if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
         if self.comment is not None: body['comment'] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.force is not None: body['force'] = self.force
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
@@ -4646,7 +5162,8 @@ class UpdateStorageCredential:
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
                    comment=d.get('comment', None),
-                   databricks_gcp_service_account=d
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                       DatabricksGcpServiceAccountRequest),
                    force=d.get('force', None),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
@@ -4660,7 +5177,7 @@ class UpdateVolumeRequestContent:
     comment: Optional[str] = None
     """The comment attached to the volume"""
 
-
+    name: Optional[str] = None
     """The three-level (fully qualified) name of the volume"""
 
     new_name: Optional[str] = None
@@ -4673,7 +5190,7 @@ class UpdateVolumeRequestContent:
         """Serializes the UpdateVolumeRequestContent into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.comment is not None: body['comment'] = self.comment
-        if self.
+        if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.owner is not None: body['owner'] = self.owner
         return body
@@ -4682,7 +5199,7 @@ class UpdateVolumeRequestContent:
     def from_dict(cls, d: Dict[str, any]) -> UpdateVolumeRequestContent:
         """Deserializes the UpdateVolumeRequestContent from a dictionary."""
         return cls(comment=d.get('comment', None),
-
+                   name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None))
 
@@ -4760,7 +5277,7 @@ class ValidateStorageCredential:
     cloudflare_api_token: Optional[CloudflareApiToken] = None
     """The Cloudflare API token configuration."""
 
-    databricks_gcp_service_account: Optional[
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
    """The Databricks created GCP service account configuration."""
 
     external_location_name: Optional[str] = None
@@ -4769,7 +5286,7 @@ class ValidateStorageCredential:
     read_only: Optional[bool] = None
     """Whether the storage credential is only usable for read operations."""
 
-    storage_credential_name: Optional[
+    storage_credential_name: Optional[str] = None
    """The name of the storage credential to validate."""
 
     url: Optional[str] = None
@@ -4784,11 +5301,12 @@ class ValidateStorageCredential:
             body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.external_location_name is not None:
             body['external_location_name'] = self.external_location_name
         if self.read_only is not None: body['read_only'] = self.read_only
-        if self.storage_credential_name
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
         if self.url is not None: body['url'] = self.url
         return body
 
@@ -4799,7 +5317,8 @@ class ValidateStorageCredential:
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
-                   databricks_gcp_service_account=d
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                       DatabricksGcpServiceAccountRequest),
                    external_location_name=d.get('external_location_name', None),
                    read_only=d.get('read_only', None),
                    storage_credential_name=d.get('storage_credential_name', None),
@@ -4871,6 +5390,29 @@ class ValidationResultResult(Enum):
     SKIP = 'SKIP'
 
 
+@dataclass
+class ViewData:
+    """Online Table information."""
+
+    name: Optional[str] = None
+    """Full three-part (catalog, schema, table) name of the table."""
+
+    spec: Optional[OnlineTableSpec] = None
+    """Specification of the online table."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ViewData into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.spec: body['spec'] = self.spec.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ViewData:
+        """Deserializes the ViewData from a dictionary."""
+        return cls(name=d.get('name', None), spec=_from_dict(d, 'spec', OnlineTableSpec))
+
+
 @dataclass
 class VolumeInfo:
     access_point: Optional[str] = None
@@ -5037,6 +5579,7 @@ class AccountMetastoreAssignmentsAPI:
|
|
|
5037
5579
|
body = {}
|
|
5038
5580
|
if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict()
|
|
5039
5581
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
5582
|
+
|
|
5040
5583
|
self._api.do(
|
|
5041
5584
|
'POST',
|
|
5042
5585
|
f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}',
|
|
@@ -5057,6 +5600,7 @@ class AccountMetastoreAssignmentsAPI:
|
|
|
5057
5600
|
"""
|
|
5058
5601
|
|
|
5059
5602
|
headers = {'Accept': 'application/json', }
|
|
5603
|
+
|
|
5060
5604
|
self._api.do(
|
|
5061
5605
|
'DELETE',
|
|
5062
5606
|
f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}',
|
|
@@ -5076,6 +5620,7 @@ class AccountMetastoreAssignmentsAPI:
|
|
|
5076
5620
|
"""
|
|
5077
5621
|
|
|
5078
5622
|
headers = {'Accept': 'application/json', }
|
|
5623
|
+
|
|
5079
5624
|
res = self._api.do('GET',
|
|
5080
5625
|
f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore',
|
|
5081
5626
|
headers=headers)
|
|
@@ -5093,6 +5638,7 @@ class AccountMetastoreAssignmentsAPI:
|
|
|
5093
5638
|
"""
|
|
5094
5639
|
|
|
5095
5640
|
headers = {'Accept': 'application/json', }
|
|
5641
|
+
|
|
5096
5642
|
json = self._api.do('GET',
|
|
5097
5643
|
f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces',
|
|
5098
5644
|
headers=headers)
|
|
@@ -5120,6 +5666,7 @@ class AccountMetastoreAssignmentsAPI:
|
|
|
5120
5666
|
body = {}
|
|
5121
5667
|
if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict()
|
|
5122
5668
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
5669
|
+
|
|
5123
5670
|
self._api.do(
|
|
5124
5671
|
'PUT',
|
|
5125
5672
|
f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}',
|
|
@@ -5146,6 +5693,7 @@ class AccountMetastoresAPI:
|
|
|
5146
5693
|
body = {}
|
|
5147
5694
|
if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict()
|
|
5148
5695
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
5696
|
+
|
|
5149
5697
|
res = self._api.do('POST',
|
|
5150
5698
|
f'/api/2.0/accounts/{self._api.account_id}/metastores',
|
|
5151
5699
|
body=body,
|
|
@@ -5168,6 +5716,7 @@ class AccountMetastoresAPI:
|
|
|
5168
5716
|
query = {}
|
|
5169
5717
|
if force is not None: query['force'] = force
|
|
5170
5718
|
headers = {'Accept': 'application/json', }
|
|
5719
|
+
|
|
5171
5720
|
self._api.do('DELETE',
|
|
5172
5721
|
f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}',
|
|
5173
5722
|
query=query,
|
|
@@ -5185,6 +5734,7 @@ class AccountMetastoresAPI:
|
|
|
5185
5734
|
"""
|
|
5186
5735
|
|
|
5187
5736
|
headers = {'Accept': 'application/json', }
|
|
5737
|
+
|
|
5188
5738
|
res = self._api.do('GET',
|
|
5189
5739
|
f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}',
|
|
5190
5740
|
headers=headers)
|
|
@@ -5199,6 +5749,7 @@ class AccountMetastoresAPI:
|
|
|
5199
5749
|
"""
|
|
5200
5750
|
|
|
5201
5751
|
headers = {'Accept': 'application/json', }
|
|
5752
|
+
|
|
5202
5753
|
        json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/metastores', headers=headers)
        parsed = ListMetastoresResponse.from_dict(json).metastores
        return parsed if parsed is not None else []
@@ -5220,6 +5771,7 @@ class AccountMetastoresAPI:
        body = {}
        if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict()
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PUT',
                           f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}',
                           body=body,
@@ -5256,6 +5808,7 @@ class AccountStorageCredentialsAPI:
        body = {}
        if credential_info is not None: body['credential_info'] = credential_info.as_dict()
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do(
            'POST',
            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials',
@@ -5282,6 +5835,7 @@ class AccountStorageCredentialsAPI:
        query = {}
        if force is not None: query['force'] = force
        headers = {'Accept': 'application/json', }
+
        self._api.do(
            'DELETE',
            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}',
@@ -5303,6 +5857,7 @@ class AccountStorageCredentialsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do(
            'GET',
            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}',
@@ -5321,6 +5876,7 @@ class AccountStorageCredentialsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do(
            'GET',
            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials',
@@ -5348,6 +5904,7 @@ class AccountStorageCredentialsAPI:
        body = {}
        if credential_info is not None: body['credential_info'] = credential_info.as_dict()
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do(
            'PUT',
            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}',
@@ -5376,6 +5933,7 @@ class ArtifactAllowlistsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}',
                           headers=headers)
@@ -5399,6 +5957,7 @@ class ArtifactAllowlistsAPI:
        body = {}
        if artifact_matchers is not None: body['artifact_matchers'] = [v.as_dict() for v in artifact_matchers]
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PUT',
                           f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}',
                           body=body,
@@ -5463,6 +6022,7 @@ class CatalogsAPI:
        if share_name is not None: body['share_name'] = share_name
        if storage_root is not None: body['storage_root'] = storage_root
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/catalogs', body=body, headers=headers)
        return CatalogInfo.from_dict(res)

@@ -5483,6 +6043,7 @@ class CatalogsAPI:
        query = {}
        if force is not None: query['force'] = force
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/catalogs/{name}', query=query, headers=headers)

    def get(self, name: str) -> CatalogInfo:
@@ -5498,6 +6059,7 @@ class CatalogsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/catalogs/{name}', headers=headers)
        return CatalogInfo.from_dict(res)

@@ -5513,6 +6075,7 @@ class CatalogsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', headers=headers)
        parsed = ListCatalogsResponse.from_dict(json).catalogs
        return parsed if parsed is not None else []
@@ -5557,6 +6120,7 @@ class CatalogsAPI:
        if owner is not None: body['owner'] = owner
        if properties is not None: body['properties'] = properties
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/catalogs/{name}', body=body, headers=headers)
        return CatalogInfo.from_dict(res)

@@ -5612,36 +6176,39 @@ class ConnectionsAPI:
        if properties is not None: body['properties'] = properties
        if read_only is not None: body['read_only'] = read_only
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/connections', body=body, headers=headers)
        return ConnectionInfo.from_dict(res)

-    def delete(self,
+    def delete(self, name: str):
        """Delete a connection.

        Deletes the connection that matches the supplied name.

-        :param
+        :param name: str
          The name of the connection to be deleted.


        """

        headers = {'Accept': 'application/json', }
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/connections/{name_arg}', headers=headers)

-
+        self._api.do('DELETE', f'/api/2.1/unity-catalog/connections/{name}', headers=headers)
+
+    def get(self, name: str) -> ConnectionInfo:
        """Get a connection.

        Gets a connection from it's name.

-        :param
+        :param name: str
          Name of the connection.

        :returns: :class:`ConnectionInfo`
        """

        headers = {'Accept': 'application/json', }
-
+
+        res = self._api.do('GET', f'/api/2.1/unity-catalog/connections/{name}', headers=headers)
        return ConnectionInfo.from_dict(res)

    def list(self) -> Iterator[ConnectionInfo]:
@@ -5653,12 +6220,13 @@ class ConnectionsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        json = self._api.do('GET', '/api/2.1/unity-catalog/connections', headers=headers)
        parsed = ListConnectionsResponse.from_dict(json).connections
        return parsed if parsed is not None else []

    def update(self,
-
+               name: str,
               options: Dict[str, str],
               *,
               new_name: Optional[str] = None,
@@ -5667,7 +6235,7 @@ class ConnectionsAPI:

        Updates the connection that matches the supplied name.

-        :param
+        :param name: str
          Name of the connection.
        :param options: Dict[str,str]
          A map of key-value properties attached to the securable.
@@ -5683,10 +6251,8 @@ class ConnectionsAPI:
        if options is not None: body['options'] = options
        if owner is not None: body['owner'] = owner
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-
-                           body=body,
-                           headers=headers)
+
+        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/connections/{name}', body=body, headers=headers)
        return ConnectionInfo.from_dict(res)

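The ConnectionsAPI hunks above rename the first parameter of delete, get and update from name_arg to name, so code written against 0.19.x that passed it by keyword needs a one-word change. A minimal sketch of the new calling convention, assuming default client authentication and a hypothetical connection called my_connection:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # 0.19.x accepted name_arg=...; from 0.21.0 the keyword is name=...
    conn = w.connections.get(name='my_connection')
    w.connections.update(name=conn.name, options={'host': 'db.example.com'})  # hypothetical option
    w.connections.delete(name=conn.name)

Positional callers are unaffected; only the keyword spelling changed.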
@@ -5750,6 +6316,7 @@ class ExternalLocationsAPI:
        if skip_validation is not None: body['skip_validation'] = skip_validation
        if url is not None: body['url'] = url
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/external-locations', body=body, headers=headers)
        return ExternalLocationInfo.from_dict(res)

@@ -5770,6 +6337,7 @@ class ExternalLocationsAPI:
        query = {}
        if force is not None: query['force'] = force
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE',
                     f'/api/2.1/unity-catalog/external-locations/{name}',
                     query=query,
@@ -5788,6 +6356,7 @@ class ExternalLocationsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/external-locations/{name}', headers=headers)
        return ExternalLocationInfo.from_dict(res)

@@ -5886,6 +6455,7 @@ class ExternalLocationsAPI:
        if skip_validation is not None: body['skip_validation'] = skip_validation
        if url is not None: body['url'] = url
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH',
                           f'/api/2.1/unity-catalog/external-locations/{name}',
                           body=body,
@@ -5920,6 +6490,7 @@ class FunctionsAPI:
        body = {}
        if function_info is not None: body['function_info'] = function_info.as_dict()
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/functions', body=body, headers=headers)
        return FunctionInfo.from_dict(res)

@@ -5944,6 +6515,7 @@ class FunctionsAPI:
        query = {}
        if force is not None: query['force'] = force
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/functions/{name}', query=query, headers=headers)

    def get(self, name: str) -> FunctionInfo:
@@ -5964,6 +6536,7 @@ class FunctionsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/functions/{name}', headers=headers)
        return FunctionInfo.from_dict(res)

@@ -6034,6 +6607,7 @@ class FunctionsAPI:
        body = {}
        if owner is not None: body['owner'] = owner
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/functions/{name}', body=body, headers=headers)
        return FunctionInfo.from_dict(res)

@@ -6074,6 +6648,7 @@ class GrantsAPI:
        query = {}
        if principal is not None: query['principal'] = principal
        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}',
                           query=query,
@@ -6103,6 +6678,7 @@ class GrantsAPI:
        query = {}
        if principal is not None: query['principal'] = principal
        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/effective-permissions/{securable_type.value}/{full_name}',
                           query=query,
@@ -6130,6 +6706,7 @@ class GrantsAPI:
        body = {}
        if changes is not None: body['changes'] = [v.as_dict() for v in changes]
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH',
                           f'/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}',
                           body=body,
@@ -6169,6 +6746,7 @@ class LakehouseMonitorsAPI:
        """

        headers = {}
+
        self._api.do('POST',
                     f'/api/2.1/unity-catalog/tables/{full_name}/monitor/refreshes/{refresh_id}/cancel',
                     headers=headers)
@@ -6186,7 +6764,7 @@ class LakehouseMonitorsAPI:
               schedule: Optional[MonitorCronSchedule] = None,
               skip_builtin_dashboard: Optional[bool] = None,
               slicing_exprs: Optional[List[str]] = None,
-               snapshot: Optional[
+               snapshot: Optional[MonitorSnapshotProfileType] = None,
               time_series: Optional[MonitorTimeSeriesProfileType] = None,
               warehouse_id: Optional[str] = None) -> MonitorInfo:
        """Create a table monitor.
@@ -6227,7 +6805,7 @@ class LakehouseMonitorsAPI:
          List of column expressions to slice data with for targeted analysis. The data is grouped by each
          expression independently, resulting in a separate slice for each predicate and its complements. For
          high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-        :param snapshot:
+        :param snapshot: :class:`MonitorSnapshotProfileType` (optional)
          Configuration for monitoring snapshot tables.
        :param time_series: :class:`MonitorTimeSeriesProfileType` (optional)
          Configuration for monitoring time series tables.
@@ -6249,10 +6827,11 @@ class LakehouseMonitorsAPI:
        if schedule is not None: body['schedule'] = schedule.as_dict()
        if skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = skip_builtin_dashboard
        if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs]
-        if snapshot is not None: body['snapshot'] = snapshot
+        if snapshot is not None: body['snapshot'] = snapshot.as_dict()
        if time_series is not None: body['time_series'] = time_series.as_dict()
        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST',
                           f'/api/2.1/unity-catalog/tables/{full_name}/monitor',
                           body=body,
@@ -6281,6 +6860,7 @@ class LakehouseMonitorsAPI:
        """

        headers = {}
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/tables/{full_name}/monitor', headers=headers)

    def get(self, full_name: str) -> MonitorInfo:
@@ -6304,6 +6884,7 @@ class LakehouseMonitorsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}/monitor', headers=headers)
        return MonitorInfo.from_dict(res)

@@ -6328,6 +6909,7 @@ class LakehouseMonitorsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/tables/{full_name}/monitor/refreshes/{refresh_id}',
                           headers=headers)
@@ -6352,6 +6934,7 @@ class LakehouseMonitorsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/tables/{full_name}/monitor/refreshes',
                           headers=headers)
@@ -6377,6 +6960,7 @@ class LakehouseMonitorsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('POST',
                           f'/api/2.1/unity-catalog/tables/{full_name}/monitor/refreshes',
                           headers=headers)
@@ -6384,7 +6968,6 @@ class LakehouseMonitorsAPI:

    def update(self,
               full_name: str,
-               assets_dir: str,
               output_schema_name: str,
               *,
               baseline_table_name: Optional[str] = None,
@@ -6394,7 +6977,7 @@ class LakehouseMonitorsAPI:
               notifications: Optional[List[MonitorNotificationsConfig]] = None,
               schedule: Optional[MonitorCronSchedule] = None,
               slicing_exprs: Optional[List[str]] = None,
-               snapshot: Optional[
+               snapshot: Optional[MonitorSnapshotProfileType] = None,
               time_series: Optional[MonitorTimeSeriesProfileType] = None) -> MonitorInfo:
        """Update a table monitor.

@@ -6412,8 +6995,6 @@ class LakehouseMonitorsAPI:

        :param full_name: str
          Full name of the table.
-        :param assets_dir: str
-          The directory to store monitoring assets (e.g. dashboard, metric tables).
        :param output_schema_name: str
          Schema where output metric tables are created.
        :param baseline_table_name: str (optional)
@@ -6434,7 +7015,7 @@ class LakehouseMonitorsAPI:
          List of column expressions to slice data with for targeted analysis. The data is grouped by each
          expression independently, resulting in a separate slice for each predicate and its complements. For
          high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-        :param snapshot:
+        :param snapshot: :class:`MonitorSnapshotProfileType` (optional)
          Configuration for monitoring snapshot tables.
        :param time_series: :class:`MonitorTimeSeriesProfileType` (optional)
          Configuration for monitoring time series tables.
@@ -6442,7 +7023,6 @@ class LakehouseMonitorsAPI:
        :returns: :class:`MonitorInfo`
        """
        body = {}
-        if assets_dir is not None: body['assets_dir'] = assets_dir
        if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name
        if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics]
        if data_classification_config is not None:
@@ -6452,9 +7032,10 @@ class LakehouseMonitorsAPI:
        if output_schema_name is not None: body['output_schema_name'] = output_schema_name
        if schedule is not None: body['schedule'] = schedule.as_dict()
        if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs]
-        if snapshot is not None: body['snapshot'] = snapshot
+        if snapshot is not None: body['snapshot'] = snapshot.as_dict()
        if time_series is not None: body['time_series'] = time_series.as_dict()
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PUT',
                           f'/api/2.1/unity-catalog/tables/{full_name}/monitor',
                           body=body,
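Two behavioural changes sit in the LakehouseMonitorsAPI hunks above: snapshot is now typed as MonitorSnapshotProfileType and serialized with as_dict(), and update() no longer accepts assets_dir. A hedged sketch of the new shape, assuming the client exposes the service as w.lakehouse_monitors and using hypothetical table and schema names:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # create() still takes assets_dir; snapshot is now the typed (empty) profile object
    w.lakehouse_monitors.create(full_name='main.default.events',        # hypothetical table
                                assets_dir='/Shared/monitors/events',   # hypothetical assets directory
                                output_schema_name='main.monitoring',   # hypothetical output schema
                                snapshot=catalog.MonitorSnapshotProfileType())

    # update() dropped assets_dir in this release
    w.lakehouse_monitors.update(full_name='main.default.events',
                                output_schema_name='main.monitoring',
                                snapshot=catalog.MonitorSnapshotProfileType())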
@@ -6498,6 +7079,7 @@ class MetastoresAPI:
        if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name
        if metastore_id is not None: body['metastore_id'] = metastore_id
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        self._api.do('PUT',
                     f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore',
                     body=body,
@@ -6530,6 +7112,7 @@ class MetastoresAPI:
        if region is not None: body['region'] = region
        if storage_root is not None: body['storage_root'] = storage_root
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/metastores', body=body, headers=headers)
        return MetastoreInfo.from_dict(res)

@@ -6542,6 +7125,7 @@ class MetastoresAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', '/api/2.1/unity-catalog/current-metastore-assignment', headers=headers)
        return MetastoreAssignment.from_dict(res)

@@ -6561,6 +7145,7 @@ class MetastoresAPI:
        query = {}
        if force is not None: query['force'] = force
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/metastores/{id}', query=query, headers=headers)

    def get(self, id: str) -> MetastoreInfo:
@@ -6576,6 +7161,7 @@ class MetastoresAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/metastores/{id}', headers=headers)
        return MetastoreInfo.from_dict(res)

@@ -6589,6 +7175,7 @@ class MetastoresAPI:
        """

        headers = {'Accept': 'application/json', }
+
        json = self._api.do('GET', '/api/2.1/unity-catalog/metastores', headers=headers)
        parsed = ListMetastoresResponse.from_dict(json).metastores
        return parsed if parsed is not None else []
@@ -6603,6 +7190,7 @@ class MetastoresAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', '/api/2.1/unity-catalog/metastore_summary', headers=headers)
        return GetMetastoreSummaryResponse.from_dict(res)

@@ -6622,6 +7210,7 @@ class MetastoresAPI:
        query = {}
        if metastore_id is not None: query['metastore_id'] = metastore_id
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE',
                     f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore',
                     query=query,
@@ -6675,6 +7264,7 @@ class MetastoresAPI:
        if storage_root_credential_id is not None:
            body['storage_root_credential_id'] = storage_root_credential_id
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/metastores/{id}', body=body, headers=headers)
        return MetastoreInfo.from_dict(res)

@@ -6703,6 +7293,7 @@ class MetastoresAPI:
        if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name
        if metastore_id is not None: body['metastore_id'] = metastore_id
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        self._api.do('PATCH',
                     f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore',
                     body=body,
@@ -6739,6 +7330,7 @@ class ModelVersionsAPI:
        """

        headers = {}
+
        self._api.do('DELETE',
                     f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
                     headers=headers)
@@ -6761,6 +7353,7 @@ class ModelVersionsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
                           headers=headers)
@@ -6784,6 +7377,7 @@ class ModelVersionsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}',
                           headers=headers)
@@ -6861,6 +7455,7 @@ class ModelVersionsAPI:
        body = {}
        if comment is not None: body['comment'] = comment
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH',
                           f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
                           body=body,
@@ -6868,6 +7463,66 @@ class ModelVersionsAPI:
        return ModelVersionInfo.from_dict(res)


+class OnlineTablesAPI:
+    """Online tables provide lower latency and higher QPS access to data from Delta tables."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, name: Optional[str] = None, spec: Optional[OnlineTableSpec] = None) -> OnlineTable:
+        """Create an Online Table.
+
+        Create a new Online Table.
+
+        :param name: str (optional)
+          Full three-part (catalog, schema, table) name of the table.
+        :param spec: :class:`OnlineTableSpec` (optional)
+          Specification of the online table.
+
+        :returns: :class:`OnlineTable`
+        """
+        body = {}
+        if name is not None: body['name'] = name
+        if spec is not None: body['spec'] = spec.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
+        return OnlineTable.from_dict(res)
+
+    def delete(self, name: str):
+        """Delete an Online Table.
+
+        Delete an online table. Warning: This will delete all the data in the online table. If the source
+        Delta table was deleted or modified since this Online Table was created, this will lose the data
+        forever!
+
+        :param name: str
+          Full three-part (catalog, schema, table) name of the table.
+
+
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/online-tables/{name}', headers=headers)
+
+    def get(self, name: str) -> OnlineTable:
+        """Get an Online Table.
+
+        Get information about an existing online table and its status.
+
+        :param name: str
+          Full three-part (catalog, schema, table) name of the table.
+
+        :returns: :class:`OnlineTable`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/online-tables/{name}', headers=headers)
+        return OnlineTable.from_dict(res)
+
+
class RegisteredModelsAPI:
    """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
    provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
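The block above introduces OnlineTablesAPI, a new service wrapping /api/2.0/online-tables with create, get and delete. A minimal sketch of how it might be driven, assuming the client exposes it as w.online_tables and that OnlineTableSpec carries source_table_full_name and primary_key_columns fields (both assumptions here, since the dataclass is defined earlier in this file and not shown in these hunks):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    spec = catalog.OnlineTableSpec(
        source_table_full_name='main.default.users',  # hypothetical source Delta table
        primary_key_columns=['user_id'])              # hypothetical primary key

    ot = w.online_tables.create(name='main.default.users_online', spec=spec)
    print(w.online_tables.get(name='main.default.users_online').status)
    # w.online_tables.delete(name='main.default.users_online')  # also discards the online copy's data

Note the warning in delete(): dropping an online table discards its data, and it cannot be rebuilt if the source Delta table has since been deleted or modified.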
@@ -6936,6 +7591,7 @@ class RegisteredModelsAPI:
        if schema_name is not None: body['schema_name'] = schema_name
        if storage_location is not None: body['storage_location'] = storage_location
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/models', body=body, headers=headers)
        return RegisteredModelInfo.from_dict(res)

@@ -6955,6 +7611,7 @@ class RegisteredModelsAPI:
        """

        headers = {}
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}', headers=headers)

    def delete_alias(self, full_name: str, alias: str):
@@ -6975,6 +7632,7 @@ class RegisteredModelsAPI:
        """

        headers = {}
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', headers=headers)

    def get(self, full_name: str) -> RegisteredModelInfo:
@@ -6993,6 +7651,7 @@ class RegisteredModelsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/models/{full_name}', headers=headers)
        return RegisteredModelInfo.from_dict(res)

@@ -7068,6 +7727,7 @@ class RegisteredModelsAPI:
        body = {}
        if version_num is not None: body['version_num'] = version_num
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PUT',
                           f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}',
                           body=body,
@@ -7106,6 +7766,7 @@ class RegisteredModelsAPI:
        if new_name is not None: body['new_name'] = new_name
        if owner is not None: body['owner'] = owner
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/models/{full_name}', body=body, headers=headers)
        return RegisteredModelInfo.from_dict(res)

@@ -7151,6 +7812,7 @@ class SchemasAPI:
        if properties is not None: body['properties'] = properties
        if storage_root is not None: body['storage_root'] = storage_root
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/schemas', body=body, headers=headers)
        return SchemaInfo.from_dict(res)

@@ -7167,6 +7829,7 @@ class SchemasAPI:
        """

        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', headers=headers)

    def get(self, full_name: str) -> SchemaInfo:
@@ -7182,6 +7845,7 @@ class SchemasAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/schemas/{full_name}', headers=headers)
        return SchemaInfo.from_dict(res)

@@ -7264,6 +7928,7 @@ class SchemasAPI:
        if owner is not None: body['owner'] = owner
        if properties is not None: body['properties'] = properties
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/schemas/{full_name}', body=body, headers=headers)
        return SchemaInfo.from_dict(res)

@@ -7291,7 +7956,7 @@ class StorageCredentialsAPI:
               azure_service_principal: Optional[AzureServicePrincipal] = None,
               cloudflare_api_token: Optional[CloudflareApiToken] = None,
               comment: Optional[str] = None,
-               databricks_gcp_service_account: Optional[
+               databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
               read_only: Optional[bool] = None,
               skip_validation: Optional[bool] = None) -> StorageCredentialInfo:
        """Create a storage credential.
@@ -7310,7 +7975,7 @@ class StorageCredentialsAPI:
          The Cloudflare API token configuration.
        :param comment: str (optional)
          Comment associated with the credential.
-        :param databricks_gcp_service_account:
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
          The <Databricks> managed GCP service account configuration.
        :param read_only: bool (optional)
          Whether the storage credential is only usable for read operations.
@@ -7328,11 +7993,12 @@ class StorageCredentialsAPI:
        if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
        if comment is not None: body['comment'] = comment
        if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
        if name is not None: body['name'] = name
        if read_only is not None: body['read_only'] = read_only
        if skip_validation is not None: body['skip_validation'] = skip_validation
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/storage-credentials', body=body, headers=headers)
        return StorageCredentialInfo.from_dict(res)

@@ -7353,6 +8019,7 @@ class StorageCredentialsAPI:
        query = {}
        if force is not None: query['force'] = force
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE',
                     f'/api/2.1/unity-catalog/storage-credentials/{name}',
                     query=query,
@@ -7371,6 +8038,7 @@ class StorageCredentialsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/storage-credentials/{name}', headers=headers)
        return StorageCredentialInfo.from_dict(res)

@@ -7423,7 +8091,7 @@ class StorageCredentialsAPI:
               azure_service_principal: Optional[AzureServicePrincipal] = None,
               cloudflare_api_token: Optional[CloudflareApiToken] = None,
               comment: Optional[str] = None,
-               databricks_gcp_service_account: Optional[
+               databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
               force: Optional[bool] = None,
               new_name: Optional[str] = None,
               owner: Optional[str] = None,
@@ -7445,7 +8113,7 @@ class StorageCredentialsAPI:
          The Cloudflare API token configuration.
        :param comment: str (optional)
          Comment associated with the credential.
-        :param databricks_gcp_service_account:
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
          The <Databricks> managed GCP service account configuration.
        :param force: bool (optional)
          Force update even if there are dependent external locations or external tables.
@@ -7469,13 +8137,14 @@ class StorageCredentialsAPI:
        if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
        if comment is not None: body['comment'] = comment
        if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
        if force is not None: body['force'] = force
        if new_name is not None: body['new_name'] = new_name
        if owner is not None: body['owner'] = owner
        if read_only is not None: body['read_only'] = read_only
        if skip_validation is not None: body['skip_validation'] = skip_validation
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH',
                           f'/api/2.1/unity-catalog/storage-credentials/{name}',
                           body=body,
@@ -7488,10 +8157,10 @@ class StorageCredentialsAPI:
                 azure_managed_identity: Optional[AzureManagedIdentity] = None,
                 azure_service_principal: Optional[AzureServicePrincipal] = None,
                 cloudflare_api_token: Optional[CloudflareApiToken] = None,
-                 databricks_gcp_service_account: Optional[
+                 databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
                 external_location_name: Optional[str] = None,
                 read_only: Optional[bool] = None,
-                 storage_credential_name: Optional[
+                 storage_credential_name: Optional[str] = None,
                 url: Optional[str] = None) -> ValidateStorageCredentialResponse:
        """Validate a storage credential.

@@ -7513,13 +8182,13 @@ class StorageCredentialsAPI:
          The Azure service principal configuration.
        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
          The Cloudflare API token configuration.
-        :param databricks_gcp_service_account:
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
          The Databricks created GCP service account configuration.
        :param external_location_name: str (optional)
          The name of an existing external location to validate.
        :param read_only: bool (optional)
          Whether the storage credential is only usable for read operations.
-        :param storage_credential_name:
+        :param storage_credential_name: str (optional)
          The name of the storage credential to validate.
        :param url: str (optional)
          The external location url to validate.
@@ -7534,12 +8203,13 @@ class StorageCredentialsAPI:
            body['azure_service_principal'] = azure_service_principal.as_dict()
        if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
        if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
        if external_location_name is not None: body['external_location_name'] = external_location_name
        if read_only is not None: body['read_only'] = read_only
        if storage_credential_name is not None: body['storage_credential_name'] = storage_credential_name
        if url is not None: body['url'] = url
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST',
                           '/api/2.1/unity-catalog/validate-storage-credentials',
                           body=body,
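In the StorageCredentialsAPI hunks above, databricks_gcp_service_account changes from an untyped value to a DatabricksGcpServiceAccountRequest that is serialized with as_dict(), and validate() now types storage_credential_name as str. A sketch under those assumptions, with hypothetical credential and bucket names:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # the empty request object asks Databricks to provision the GCP service account
    cred = w.storage_credentials.create(
        name='gcs_cred',
        databricks_gcp_service_account=catalog.DatabricksGcpServiceAccountRequest(),
        comment='created via SDK')

    w.storage_credentials.validate(storage_credential_name=cred.name,
                                   url='gs://my-bucket/landing')

Callers that previously passed a plain dict for the GCP field now construct the request dataclass instead.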
@@ -7569,6 +8239,7 @@ class SystemSchemasAPI:
        """

        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE',
                     f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name.value}',
                     headers=headers)
@@ -7588,6 +8259,7 @@ class SystemSchemasAPI:
        """

        headers = {'Accept': 'application/json', }
+
        self._api.do('PUT',
                     f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name.value}',
                     headers=headers)
@@ -7605,6 +8277,7 @@ class SystemSchemasAPI:
        """

        headers = {'Accept': 'application/json', }
+
        json = self._api.do('GET',
                            f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
                            headers=headers)
@@ -7651,6 +8324,7 @@ class TableConstraintsAPI:
        if constraint is not None: body['constraint'] = constraint.as_dict()
        if full_name_arg is not None: body['full_name_arg'] = full_name_arg
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/constraints', body=body, headers=headers)
        return TableConstraint.from_dict(res)

@@ -7681,6 +8355,7 @@ class TableConstraintsAPI:
        if cascade is not None: query['cascade'] = cascade
        if constraint_name is not None: query['constraint_name'] = constraint_name
        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE',
                     f'/api/2.1/unity-catalog/constraints/{full_name}',
                     query=query,
@@ -7715,6 +8390,7 @@ class TablesAPI:
        """

        headers = {'Accept': 'application/json', }
+
        self._api.do('DELETE', f'/api/2.1/unity-catalog/tables/{full_name}', headers=headers)

    def exists(self, full_name: str) -> TableExistsResponse:
@@ -7734,6 +8410,7 @@ class TablesAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}/exists', headers=headers)
        return TableExistsResponse.from_dict(res)

@@ -7757,6 +8434,7 @@ class TablesAPI:
        query = {}
        if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}', query=query, headers=headers)
        return TableInfo.from_dict(res)

@@ -7889,6 +8567,7 @@ class TablesAPI:
        body = {}
        if owner is not None: body['owner'] = owner
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        self._api.do('PATCH', f'/api/2.1/unity-catalog/tables/{full_name}', body=body, headers=headers)


@@ -7951,10 +8630,11 @@ class VolumesAPI:
        if storage_location is not None: body['storage_location'] = storage_location
        if volume_type is not None: body['volume_type'] = volume_type.value
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('POST', '/api/2.1/unity-catalog/volumes', body=body, headers=headers)
        return VolumeInfo.from_dict(res)

-    def delete(self,
+    def delete(self, name: str):
        """Delete a Volume.

        Deletes a volume from the specified parent catalog and schema.
@@ -7963,19 +8643,25 @@ class VolumesAPI:
        also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
        privilege on the parent schema.

-        :param
+        :param name: str
          The three-level (fully qualified) name of the volume


        """

        headers = {}
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/volumes/{full_name_arg}', headers=headers)

-
+        self._api.do('DELETE', f'/api/2.1/unity-catalog/volumes/{name}', headers=headers)
+
+    def list(self,
+             catalog_name: str,
+             schema_name: str,
+             *,
+             max_results: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[VolumeInfo]:
        """List Volumes.

-        Gets an array of
+        Gets an array of volumes for the current metastore under the parent catalog and schema.

        The returned volumes are filtered based on the privileges of the calling user. For example, the
        metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
@@ -7989,19 +8675,42 @@ class VolumesAPI:
          The identifier of the catalog
        :param schema_name: str
          The identifier of the schema
+        :param max_results: int (optional)
+          Maximum number of volumes to return (page length).
+
+          If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set
+          to a value greater than 0, the page length is the minimum of this value and a server configured
+          value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value
+          (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter
+          error is returned;
+
+          Note: this parameter controls only the maximum number of volumes to return. The actual number of
+          volumes returned in a page may be smaller than this value, including 0, even if there are more
+          pages.
+        :param page_token: str (optional)
+          Opaque token returned by a previous request. It must be included in the request to retrieve the next
+          page of results (pagination).

        :returns: Iterator over :class:`VolumeInfo`
        """

        query = {}
        if catalog_name is not None: query['catalog_name'] = catalog_name
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
        if schema_name is not None: query['schema_name'] = schema_name
        headers = {'Accept': 'application/json', }
-        json = self._api.do('GET', '/api/2.1/unity-catalog/volumes', query=query, headers=headers)
-        parsed = ListVolumesResponseContent.from_dict(json).volumes
-        return parsed if parsed is not None else []

-
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/volumes', query=query, headers=headers)
+            if 'volumes' in json:
+                for v in json['volumes']:
+                    yield VolumeInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def read(self, name: str) -> VolumeInfo:
        """Get a Volume.

        Gets a volume from the metastore for a specific catalog and schema.
@@ -8010,18 +8719,19 @@ class VolumesAPI:
        volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.

-        :param
+        :param name: str
          The three-level (fully qualified) name of the volume

        :returns: :class:`VolumeInfo`
        """

        headers = {'Accept': 'application/json', }
-
+
+        res = self._api.do('GET', f'/api/2.1/unity-catalog/volumes/{name}', headers=headers)
        return VolumeInfo.from_dict(res)

    def update(self,
-
+               name: str,
               *,
               comment: Optional[str] = None,
               new_name: Optional[str] = None,
@@ -8036,7 +8746,7 @@ class VolumesAPI:

        Currently only the name, the owner or the comment of the volume could be updated.

-        :param
+        :param name: str
          The three-level (fully qualified) name of the volume
        :param comment: str (optional)
          The comment attached to the volume
@@ -8052,10 +8762,8 @@ class VolumesAPI:
        if new_name is not None: body['new_name'] = new_name
        if owner is not None: body['owner'] = owner
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-
-                           body=body,
-                           headers=headers)
+
+        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/volumes/{name}', body=body, headers=headers)
        return VolumeInfo.from_dict(res)

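The VolumesAPI changes above do two things: delete, read and update now take name instead of full_name_arg, and list() becomes a generator that follows next_page_token, issuing one request per page instead of a single unpaginated call. A sketch of the paginated listing, assuming a catalog main with schema default and a hypothetical volume name:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # pages of up to 100 volumes are fetched lazily as the iterator is consumed
    for vol in w.volumes.list(catalog_name='main', schema_name='default', max_results=100):
        print(vol.full_name)

    v = w.volumes.read(name='main.default.raw_files')   # was full_name_arg= in 0.19.x
    w.volumes.update(name=v.full_name, comment='landing area')

Because the iterator is lazy, breaking out of the loop early avoids requesting the remaining pages.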
@@ -8091,6 +8799,7 @@ class WorkspaceBindingsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}',
                           headers=headers)
@@ -8111,6 +8820,7 @@ class WorkspaceBindingsAPI:
        """

        headers = {'Accept': 'application/json', }
+
        res = self._api.do('GET',
                           f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}',
                           headers=headers)
@@ -8139,6 +8849,7 @@ class WorkspaceBindingsAPI:
        if assign_workspaces is not None: body['assign_workspaces'] = [v for v in assign_workspaces]
        if unassign_workspaces is not None: body['unassign_workspaces'] = [v for v in unassign_workspaces]
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH',
                           f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}',
                           body=body,
@@ -8171,6 +8882,7 @@ class WorkspaceBindingsAPI:
        if add is not None: body['add'] = [v.as_dict() for v in add]
        if remove is not None: body['remove'] = [v.as_dict() for v in remove]
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
        res = self._api.do('PATCH',
                           f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}',
                           body=body,