databricks-sdk 0.28.0__py3-none-any.whl → 0.29.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +7 -3
- databricks/sdk/config.py +65 -10
- databricks/sdk/core.py +22 -0
- databricks/sdk/credentials_provider.py +121 -44
- databricks/sdk/dbutils.py +81 -3
- databricks/sdk/oauth.py +8 -6
- databricks/sdk/service/catalog.py +132 -28
- databricks/sdk/service/compute.py +21 -13
- databricks/sdk/service/dashboards.py +707 -2
- databricks/sdk/service/jobs.py +126 -152
- databricks/sdk/service/marketplace.py +136 -0
- databricks/sdk/service/oauth2.py +22 -0
- databricks/sdk/service/pipelines.py +1 -1
- databricks/sdk/service/serving.py +140 -55
- databricks/sdk/service/settings.py +1 -0
- databricks/sdk/service/sharing.py +0 -1
- databricks/sdk/service/sql.py +103 -23
- databricks/sdk/service/vectorsearch.py +75 -0
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.29.0.dist-info}/METADATA +2 -1
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.29.0.dist-info}/RECORD +25 -25
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.29.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.29.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.29.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.29.0.dist-info}/top_level.txt +0 -0
databricks/sdk/oauth.py
CHANGED

@@ -21,6 +21,10 @@ import requests.auth
 # See https://stackoverflow.com/a/75466778/277035 for more info
 NO_ORIGIN_FOR_SPA_CLIENT_ERROR = 'AADSTS9002327'

+URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
+JWT_BEARER_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+OIDC_TOKEN_PATH = "/oidc/v1/token"
+
 logger = logging.getLogger(__name__)


@@ -358,18 +362,15 @@ class OAuthClient:
                  client_secret: str = None):
         # TODO: is it a circular dependency?..
         from .core import Config
-        from .credentials_provider import …
+        from .credentials_provider import credentials_strategy

-        @…
+        @credentials_strategy('noop', [])
         def noop_credentials(_: any):
             return lambda: {}

-        config = Config(host=host, …
+        config = Config(host=host, credentials_strategy=noop_credentials)
         if not scopes:
             scopes = ['all-apis']
-        if config.is_azure:
-            # Azure AD only supports full access to Azure Databricks.
-            scopes = [f'{config.effective_azure_login_app_id}/user_impersonation', 'offline_access']
         oidc = config.oidc_endpoints
         if not oidc:
             raise ValueError(f'{host} does not support OAuth')
@@ -381,6 +382,7 @@ class OAuthClient:
         self.token_url = oidc.token_endpoint
         self.is_aws = config.is_aws
         self.is_azure = config.is_azure
+        self.is_gcp = config.is_gcp

         self._auth_url = oidc.authorization_endpoint
         self._scopes = scopes
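The constructor now registers its internal no-op credentials through the credentials_strategy decorator and no longer rewrites scopes for Azure. A minimal sketch of the same pattern outside the SDK internals; the host value is a placeholder:

from databricks.sdk.core import Config
from databricks.sdk.credentials_provider import credentials_strategy

# Register a strategy named 'noop' that requires no config attributes and adds no auth headers,
# mirroring what OAuthClient now does internally.
@credentials_strategy('noop', [])
def noop_credentials(_: any):
    return lambda: {}

# A Config built this way skips the normal credential resolution chain (host is a placeholder).
config = Config(host='https://example.cloud.databricks.com', credentials_strategy=noop_credentials)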
databricks/sdk/service/catalog.py
CHANGED

@@ -448,7 +448,7 @@ class CatalogInfo:
     full_name: Optional[str] = None
     """The full name of the catalog. Corresponds with the name field."""

-    isolation_mode: Optional[…
+    isolation_mode: Optional[CatalogIsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""

     metastore_id: Optional[str] = None
@@ -541,7 +541,7 @@ class CatalogInfo:
                    enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
                                                         EnablePredictiveOptimization),
                    full_name=d.get('full_name', None),
-                   isolation_mode=_enum(d, 'isolation_mode', …
+                   isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
                    options=d.get('options', None),
@@ -571,13 +571,18 @@ class CatalogInfoSecurableKind(Enum):
     CATALOG_FOREIGN_SQLDW = 'CATALOG_FOREIGN_SQLDW'
     CATALOG_FOREIGN_SQLSERVER = 'CATALOG_FOREIGN_SQLSERVER'
     CATALOG_INTERNAL = 'CATALOG_INTERNAL'
-    CATALOG_ONLINE = 'CATALOG_ONLINE'
-    CATALOG_ONLINE_INDEX = 'CATALOG_ONLINE_INDEX'
     CATALOG_STANDARD = 'CATALOG_STANDARD'
     CATALOG_SYSTEM = 'CATALOG_SYSTEM'
     CATALOG_SYSTEM_DELTASHARING = 'CATALOG_SYSTEM_DELTASHARING'


+class CatalogIsolationMode(Enum):
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
+    ISOLATED = 'ISOLATED'
+    OPEN = 'OPEN'
+
+
 class CatalogType(Enum):
     """The type of the catalog."""

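Catalogs now use the dedicated CatalogIsolationMode enum (ISOLATED/OPEN). A small round-trip sketch of the deserialization path shown above; the payload is illustrative:

from databricks.sdk.service.catalog import CatalogInfo, CatalogIsolationMode

# from_dict maps the raw 'isolation_mode' string onto the new enum.
info = CatalogInfo.from_dict({'name': 'main', 'isolation_mode': 'ISOLATED'})
assert info.isolation_mode is CatalogIsolationMode.ISOLATED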
@@ -1222,8 +1227,9 @@ class CreateMetastore:
     """The user-specified name of the metastore."""

     region: Optional[str] = None
-    """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). …
-    the …
+    """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted
+    in the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is
+    omitted, the region of the workspace receiving the request will be used."""

     storage_root: Optional[str] = None
     """The storage root URL for metastore"""
@@ -1494,7 +1500,7 @@ class CreateStorageCredential:
     """Comment associated with the credential."""

     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
-    """The …
+    """The Databricks managed GCP service account configuration."""

     read_only: Optional[bool] = None
     """Whether the storage credential is only usable for read operations."""
@@ -1627,14 +1633,28 @@ class DataSourceFormat(Enum):
     """Data source format"""

     AVRO = 'AVRO'
+    BIGQUERY_FORMAT = 'BIGQUERY_FORMAT'
     CSV = 'CSV'
+    DATABRICKS_FORMAT = 'DATABRICKS_FORMAT'
     DELTA = 'DELTA'
     DELTASHARING = 'DELTASHARING'
+    HIVE_CUSTOM = 'HIVE_CUSTOM'
+    HIVE_SERDE = 'HIVE_SERDE'
     JSON = 'JSON'
+    MYSQL_FORMAT = 'MYSQL_FORMAT'
+    NETSUITE_FORMAT = 'NETSUITE_FORMAT'
     ORC = 'ORC'
     PARQUET = 'PARQUET'
+    POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT'
+    REDSHIFT_FORMAT = 'REDSHIFT_FORMAT'
+    SALESFORCE_FORMAT = 'SALESFORCE_FORMAT'
+    SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT'
+    SQLDW_FORMAT = 'SQLDW_FORMAT'
+    SQLSERVER_FORMAT = 'SQLSERVER_FORMAT'
     TEXT = 'TEXT'
     UNITY_CATALOG = 'UNITY_CATALOG'
+    VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT'
+    WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'


 @dataclass
@@ -1954,6 +1974,9 @@ class ExternalLocationInfo:
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""

+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
     metastore_id: Optional[str] = None
     """Unique identifier of metastore hosting the external location."""

@@ -1986,6 +2009,7 @@ class ExternalLocationInfo:
         if self.credential_id is not None: body['credential_id'] = self.credential_id
         if self.credential_name is not None: body['credential_name'] = self.credential_name
         if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
         if self.owner is not None: body['owner'] = self.owner
@@ -2006,6 +2030,7 @@ class ExternalLocationInfo:
                    credential_id=d.get('credential_id', None),
                    credential_name=d.get('credential_name', None),
                    encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
                    owner=d.get('owner', None),
@@ -2515,8 +2540,8 @@ class GetMetastoreSummaryResponseDeltaSharingScope(Enum):
 class IsolationMode(Enum):
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""

-    …
-    …
+    ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED'
+    ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN'


 @dataclass
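External locations and storage credentials keep using IsolationMode, whose members now carry the ISOLATION_MODE_ prefix (the removed member lines are not fully captured in this view, so code referencing the old names may need updating). A quick sanity check as a sketch:

from databricks.sdk.service.catalog import IsolationMode

# Wire values carry the ISOLATION_MODE_ prefix in 0.29.0.
assert IsolationMode.ISOLATION_MODE_ISOLATED.value == 'ISOLATION_MODE_ISOLATED'
assert IsolationMode.ISOLATION_MODE_OPEN.value == 'ISOLATION_MODE_OPEN'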
@@ -2537,21 +2562,45 @@ class ListAccountMetastoreAssignmentsResponse:
         return cls(workspace_ids=d.get('workspace_ids', None))


+@dataclass
+class ListAccountStorageCredentialsResponse:
+    storage_credentials: Optional[List[StorageCredentialInfo]] = None
+    """An array of metastore storage credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.storage_credentials:
+            body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListAccountStorageCredentialsResponse:
+        """Deserializes the ListAccountStorageCredentialsResponse from a dictionary."""
+        return cls(storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo))
+
+
 @dataclass
 class ListCatalogsResponse:
     catalogs: Optional[List[CatalogInfo]] = None
     """An array of catalog information objects."""

+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     def as_dict(self) -> dict:
         """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.catalogs: body['catalogs'] = [v.as_dict() for v in self.catalogs]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCatalogsResponse:
         """Deserializes the ListCatalogsResponse from a dictionary."""
-        return cls(catalogs=_repeated_dict(d, 'catalogs', CatalogInfo)…
+        return cls(catalogs=_repeated_dict(d, 'catalogs', CatalogInfo),
+                   next_page_token=d.get('next_page_token', None))


 @dataclass
@@ -3578,12 +3627,16 @@ class OnlineTable:
     status: Optional[OnlineTableStatus] = None
     """Online Table status"""

+    table_serving_url: Optional[str] = None
+    """Data serving REST API URL for this table"""
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.name is not None: body['name'] = self.name
         if self.spec: body['spec'] = self.spec.as_dict()
         if self.status: body['status'] = self.status.as_dict()
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
         return body

     @classmethod
@@ -3591,7 +3644,8 @@ class OnlineTable:
         """Deserializes the OnlineTable from a dictionary."""
         return cls(name=d.get('name', None),
                    spec=_from_dict(d, 'spec', OnlineTableSpec),
-                   status=_from_dict(d, 'status', OnlineTableStatus)…
+                   status=_from_dict(d, 'status', OnlineTableStatus),
+                   table_serving_url=d.get('table_serving_url', None))


 @dataclass
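OnlineTable now exposes the serving endpoint via table_serving_url. A hedged sketch that assumes the existing online_tables.get accessor; the table name is a placeholder:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Fetch an existing online table and read its new serving-endpoint field.
tbl = w.online_tables.get('main.default.my_online_table')
print(tbl.table_serving_url)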
@@ -3889,7 +3943,6 @@ class Privilege(Enum):
     REFRESH = 'REFRESH'
     SELECT = 'SELECT'
     SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
-    SINGLE_USER_ACCESS = 'SINGLE_USER_ACCESS'
     USAGE = 'USAGE'
     USE_CATALOG = 'USE_CATALOG'
     USE_CONNECTION = 'USE_CONNECTION'
@@ -4318,11 +4371,14 @@ class StorageCredentialInfo:
     """Username of credential creator."""

     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None
-    """The …
+    """The Databricks managed GCP service account configuration."""

     id: Optional[str] = None
     """The unique identifier of the credential."""

+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""

@@ -4358,6 +4414,7 @@ class StorageCredentialInfo:
         if self.databricks_gcp_service_account:
             body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
         if self.owner is not None: body['owner'] = self.owner
@@ -4382,6 +4439,7 @@ class StorageCredentialInfo:
                    databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
                                                              DatabricksGcpServiceAccountResponse),
                    id=d.get('id', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
                    owner=d.get('owner', None),
@@ -4720,7 +4778,10 @@ class TableSummary:
 class TableType(Enum):

     EXTERNAL = 'EXTERNAL'
+    EXTERNAL_SHALLOW_CLONE = 'EXTERNAL_SHALLOW_CLONE'
+    FOREIGN = 'FOREIGN'
     MANAGED = 'MANAGED'
+    MANAGED_SHALLOW_CLONE = 'MANAGED_SHALLOW_CLONE'
     MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
     STREAMING_TABLE = 'STREAMING_TABLE'
     VIEW = 'VIEW'
@@ -4796,7 +4857,7 @@ class UpdateCatalog:
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
     """Whether predictive optimization should be enabled for this object and objects under it."""

-    isolation_mode: Optional[…
+    isolation_mode: Optional[CatalogIsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""

     name: Optional[str] = None
@@ -4830,7 +4891,7 @@ class UpdateCatalog:
         return cls(comment=d.get('comment', None),
                    enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
                                                         EnablePredictiveOptimization),
-                   isolation_mode=_enum(d, 'isolation_mode', …
+                   isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None),
@@ -4886,6 +4947,9 @@ class UpdateExternalLocation:
     force: Optional[bool] = None
     """Force update even if changing url invalidates dependent external tables or mounts."""

+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
     name: Optional[str] = None
     """Name of the external location."""

@@ -4912,6 +4976,7 @@ class UpdateExternalLocation:
         if self.credential_name is not None: body['credential_name'] = self.credential_name
         if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
         if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.owner is not None: body['owner'] = self.owner
@@ -4928,6 +4993,7 @@ class UpdateExternalLocation:
                    credential_name=d.get('credential_name', None),
                    encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
                    force=d.get('force', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None),
@@ -5293,11 +5359,14 @@ class UpdateStorageCredential:
     """Comment associated with the credential."""

     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
-    """The …
+    """The Databricks managed GCP service account configuration."""

     force: Optional[bool] = None
     """Force update even if there are dependent external locations or external tables."""

+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
     name: Optional[str] = None
     """Name of the storage credential."""

@@ -5325,6 +5394,7 @@ class UpdateStorageCredential:
         if self.databricks_gcp_service_account:
             body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.owner is not None: body['owner'] = self.owner
@@ -5344,6 +5414,7 @@ class UpdateStorageCredential:
                    databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
                                                              DatabricksGcpServiceAccountRequest),
                    force=d.get('force', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None),
@@ -6041,11 +6112,12 @@ class AccountStorageCredentialsAPI:

         headers = {'Accept': 'application/json', }

-        …
+        json = self._api.do(
             'GET',
             f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials',
             headers=headers)
-        …
+        parsed = ListAccountStorageCredentialsResponse.from_dict(json).storage_credentials
+        return parsed if parsed is not None else []

     def update(self,
                metastore_id: str,
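The account-level list call now parses the response through ListAccountStorageCredentialsResponse and returns an empty list instead of None. A hedged sketch; the storage_credentials accessor name and the metastore ID are assumptions, since neither is shown in this hunk:

from databricks.sdk import AccountClient

a = AccountClient()
# Assumed accessor: the account client's storage-credentials API; the metastore ID is a placeholder.
for cred in a.storage_credentials.list(metastore_id='11111111-2222-3333-4444-555555555555'):
    print(cred.name, cred.isolation_mode)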
@@ -6232,7 +6304,11 @@ class CatalogsAPI:
         res = self._api.do('GET', f'/api/2.1/unity-catalog/catalogs/{name}', query=query, headers=headers)
         return CatalogInfo.from_dict(res)

-    def list(self, …
+    def list(self,
+             *,
+             include_browse: Optional[bool] = None,
+             max_results: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[CatalogInfo]:
         """List catalogs.

         Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be
@@ -6243,24 +6319,41 @@ class CatalogsAPI:
         :param include_browse: bool (optional)
           Whether to include catalogs in the response for which the principal can only access selective
          metadata for
+        :param max_results: int (optional)
+          Maximum number of catalogs to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid catalogs are returned (not recommended). - Note: The number of
+          returned catalogs might be less than the specified max_results size, even zero. The only definitive
+          indication that no further catalogs can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.

         :returns: Iterator over :class:`CatalogInfo`
         """

         query = {}
         if include_browse is not None: query['include_browse'] = include_browse
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

-        …
-        …
-        …
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', query=query, headers=headers)
+            if 'catalogs' in json:
+                for v in json['catalogs']:
+                    yield CatalogInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']

     def update(self,
                name: str,
                *,
                comment: Optional[str] = None,
                enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
-               isolation_mode: Optional[…
+               isolation_mode: Optional[CatalogIsolationMode] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
                properties: Optional[Dict[str, str]] = None) -> CatalogInfo:
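catalogs.list() is now paginated: it accepts max_results and page_token and follows next_page_token transparently while yielding CatalogInfo objects. A minimal sketch; the page size is an arbitrary example value:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Iterate all catalogs; the SDK requests further pages behind the scenes.
for catalog in w.catalogs.list(max_results=50):
    print(catalog.name, catalog.catalog_type)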
@@ -6275,7 +6368,7 @@ class CatalogsAPI:
           User-provided free-form text description.
         :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
           Whether predictive optimization should be enabled for this object and objects under it.
-        :param isolation_mode: :class:`…
+        :param isolation_mode: :class:`CatalogIsolationMode` (optional)
           Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the catalog.
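update() now types isolation_mode as CatalogIsolationMode. A hedged sketch of isolating a catalog to its bound workspaces; the catalog name is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CatalogIsolationMode

w = WorkspaceClient()
# Restrict the catalog to the workspaces it is explicitly bound to.
w.catalogs.update(name='main', isolation_mode=CatalogIsolationMode.ISOLATED)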
@@ -6613,6 +6706,7 @@ class ExternalLocationsAPI:
                credential_name: Optional[str] = None,
                encryption_details: Optional[EncryptionDetails] = None,
                force: Optional[bool] = None,
+               isolation_mode: Optional[IsolationMode] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
                read_only: Optional[bool] = None,
@@ -6636,6 +6730,8 @@ class ExternalLocationsAPI:
           Encryption options that apply to clients connecting to cloud storage.
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the external location.
         :param owner: str (optional)
@@ -6655,6 +6751,7 @@ class ExternalLocationsAPI:
         if credential_name is not None: body['credential_name'] = credential_name
         if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict()
         if force is not None: body['force'] = force
+        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
         if read_only is not None: body['read_only'] = read_only
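External locations can now be workspace-bound through the new isolation_mode argument. A hedged sketch; the location name is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import IsolationMode

w = WorkspaceClient()
# Bind the external location to specific workspaces only.
w.external_locations.update(name='landing-zone',
                            isolation_mode=IsolationMode.ISOLATION_MODE_ISOLATED)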
@@ -6682,6 +6779,8 @@ class FunctionsAPI:
     def create(self, function_info: CreateFunction) -> FunctionInfo:
         """Create a function.

+        **WARNING: This API is experimental and will change in future versions**
+
         Creates a new function

         The user must have the following permissions in order for the function to be created: -
@@ -6986,8 +7085,9 @@ class MetastoresAPI:
         :param name: str
           The user-specified name of the metastore.
         :param region: str (optional)
-          Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). …
-          …
+          Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in
+          the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted,
+          the region of the workspace receiving the request will be used.
         :param storage_root: str (optional)
           The storage root URL for metastore

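Per the clarified docstring, region may be omitted in the workspace-level create call, in which case the workspace's own region is used. A hedged sketch; the metastore name is a placeholder and storage_root is left unset:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Workspace-level create: region defaults to the region of the calling workspace.
metastore = w.metastores.create(name='primary-metastore')
print(metastore.metastore_id, metastore.region)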
@@ -8241,7 +8341,7 @@ class StorageCredentialsAPI:
         :param comment: str (optional)
           Comment associated with the credential.
         :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-          The …
+          The Databricks managed GCP service account configuration.
         :param read_only: bool (optional)
           Whether the storage credential is only usable for read operations.
         :param skip_validation: bool (optional)
@@ -8357,6 +8457,7 @@ class StorageCredentialsAPI:
                comment: Optional[str] = None,
                databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
                force: Optional[bool] = None,
+               isolation_mode: Optional[IsolationMode] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
                read_only: Optional[bool] = None,
@@ -8378,9 +8479,11 @@ class StorageCredentialsAPI:
         :param comment: str (optional)
           Comment associated with the credential.
         :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-          The …
+          The Databricks managed GCP service account configuration.
         :param force: bool (optional)
           Force update even if there are dependent external locations or external tables.
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the storage credential.
         :param owner: str (optional)
@@ -8403,6 +8506,7 @@ class StorageCredentialsAPI:
         if databricks_gcp_service_account is not None:
             body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
         if force is not None: body['force'] = force
+        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
         if read_only is not None: body['read_only'] = read_only
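Storage credentials gain the same workspace-binding control. A hedged sketch; the credential name is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import IsolationMode

w = WorkspaceClient()
# Re-open the credential to every workspace attached to the metastore.
w.storage_credentials.update(name='gcp-sa-credential',
                             isolation_mode=IsolationMode.ISOLATION_MODE_OPEN)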
databricks/sdk/service/compute.py
CHANGED

@@ -555,7 +555,8 @@ class ClusterAttributes:
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
-    Passthrough on standard clusters."""
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""

     docker_image: Optional[DockerImage] = None

@@ -769,7 +770,8 @@ class ClusterDetails:
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
-    Passthrough on standard clusters."""
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""

     default_tags: Optional[Dict[str, str]] = None
     """Tags that are added by Databricks regardless of any `custom_tags`, including:
@@ -788,7 +790,7 @@ class ClusterDetails:

     driver: Optional[SparkNode] = None
     """Node on which the Spark driver resides. The driver node contains the Spark master and the
-    …
+    Databricks application that manages the per-notebook Spark REPLs."""

     driver_instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster
@@ -1478,7 +1480,8 @@ class ClusterSpec:
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
-    Passthrough on standard clusters."""
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""

     docker_image: Optional[DockerImage] = None

@@ -1793,7 +1796,8 @@ class CreateCluster:
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
-    Passthrough on standard clusters."""
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""

     docker_image: Optional[DockerImage] = None

@@ -2269,10 +2273,12 @@ class DataSecurityMode(Enum):
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
-    Passthrough on standard clusters."""
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""

     LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH'
     LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER'
+    LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD'
     LEGACY_TABLE_ACL = 'LEGACY_TABLE_ACL'
     NONE = 'NONE'
     SINGLE_USER = 'SINGLE_USER'
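The new LEGACY_SINGLE_USER_STANDARD member can be passed wherever a DataSecurityMode is accepted, for example at cluster creation. A hedged sketch; the Spark version, node type and worker count are placeholder values:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import DataSecurityMode

w = WorkspaceClient()
# Create a cluster that runs without Unity Catalog and without credential passthrough;
# .result() blocks until the cluster reaches the RUNNING state.
cluster = w.clusters.create(spark_version='14.3.x-scala2.12',
                            node_type_id='i3.xlarge',
                            num_workers=1,
                            data_security_mode=DataSecurityMode.LEGACY_SINGLE_USER_STANDARD).result()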
@@ -2637,7 +2643,8 @@ class EditCluster:
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
-    Passthrough on standard clusters."""
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""

     docker_image: Optional[DockerImage] = None

@@ -2977,9 +2984,8 @@ class EditResponse:

 @dataclass
 class Environment:
-    """The …
-    …
-    supported. Next ID: 5"""
+    """The environment entity used to preserve serverless environment side panel and jobs' environment
+    for non-notebook task. In this minimal environment spec, only pip dependencies are supported."""

     client: str
     """Client version used by the environment The client is the user-facing environment of the runtime.
@@ -5069,7 +5075,7 @@ class Policy:
     """Additional human-readable description of the cluster policy."""

     is_default: Optional[bool] = None
-    """If true, policy is a default policy created and managed by …
+    """If true, policy is a default policy created and managed by Databricks. Default policies cannot
     be deleted, and their policy families cannot be changed."""

     libraries: Optional[List[Library]] = None
@@ -6352,7 +6358,8 @@ class ClustersAPI:
           * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
           `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
           clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
-          standard clusters.
+          standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC
+          nor passthrough enabled.
         :param docker_image: :class:`DockerImage` (optional)
         :param driver_instance_pool_id: str (optional)
           The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses
@@ -6645,7 +6652,8 @@ class ClustersAPI:
           * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
           `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
           clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
-          standard clusters.
+          standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC
+          nor passthrough enabled.
         :param docker_image: :class:`DockerImage` (optional)
         :param driver_instance_pool_id: str (optional)
           The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses