databricks-sdk 0.39.0__tar.gz → 0.41.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk might be problematic.
- {databricks_sdk-0.39.0/databricks_sdk.egg-info → databricks-sdk-0.41.0}/PKG-INFO +3 -28
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/__init__.py +38 -4
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/_base_client.py +16 -3
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/config.py +5 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/credentials_provider.py +23 -14
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/data_plane.py +1 -1
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/files.py +184 -1
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/open_ai_client.py +40 -1
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/apps.py +12 -4
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/catalog.py +3 -2
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/cleanrooms.py +2 -1
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/compute.py +365 -63
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/dashboards.py +23 -6
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/files.py +6 -3
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/iam.py +158 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/jobs.py +257 -30
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/oauth2.py +498 -29
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/pipelines.py +92 -15
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/serving.py +423 -215
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/sharing.py +51 -54
- databricks-sdk-0.41.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0/databricks_sdk.egg-info}/PKG-INFO +3 -28
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/SOURCES.txt +2 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_base_client.py +5 -5
- databricks-sdk-0.41.0/tests/test_credentials_provider.py +145 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_data_plane.py +1 -1
- databricks-sdk-0.41.0/tests/test_files.py +340 -0
- databricks_sdk-0.39.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/LICENSE +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/NOTICE +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/README.md +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/billing.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/ml.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/settings.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/sql.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/requires.txt +14 -14
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/setup.cfg +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/setup.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_auth.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_auth_manual_tests.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_client.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_compute_mixins.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_config.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_core.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_dbfs_mixins.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_dbutils.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_environments.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_errors.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_init_file.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_internal.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_jobs.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_jobs_mixin.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_metadata_service_auth.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_misc.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_model_serving_auth.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_oauth.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_open_ai_mixin.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_retries.py +0 -0
- {databricks_sdk-0.39.0 → databricks-sdk-0.41.0}/tests/test_user_agent.py +0 -0
PKG-INFO (and databricks_sdk.egg-info/PKG-INFO):

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.39.0
+Version: 0.41.0
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin
@@ -20,36 +20,11 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
-License-File: LICENSE
-License-File: NOTICE
-Requires-Dist: requests<3,>=2.28.1
-Requires-Dist: google-auth~=2.0
 Provides-Extra: dev
-Requires-Dist: pytest; extra == "dev"
-Requires-Dist: pytest-cov; extra == "dev"
-Requires-Dist: pytest-xdist; extra == "dev"
-Requires-Dist: pytest-mock; extra == "dev"
-Requires-Dist: yapf; extra == "dev"
-Requires-Dist: pycodestyle; extra == "dev"
-Requires-Dist: autoflake; extra == "dev"
-Requires-Dist: isort; extra == "dev"
-Requires-Dist: wheel; extra == "dev"
-Requires-Dist: ipython; extra == "dev"
-Requires-Dist: ipywidgets; extra == "dev"
-Requires-Dist: requests-mock; extra == "dev"
-Requires-Dist: pyfakefs; extra == "dev"
-Requires-Dist: databricks-connect; extra == "dev"
-Requires-Dist: pytest-rerunfailures; extra == "dev"
-Requires-Dist: openai; extra == "dev"
-Requires-Dist: langchain-openai; python_version > "3.7" and extra == "dev"
-Requires-Dist: httpx; extra == "dev"
 Provides-Extra: notebook
-Requires-Dist: ipython<9,>=8; extra == "notebook"
-Requires-Dist: ipywidgets<9,>=8; extra == "notebook"
 Provides-Extra: openai
-Requires-Dist: openai; extra == "openai"
-Requires-Dist: langchain-openai; python_version > "3.7" and extra == "openai"
-Requires-Dist: httpx; extra == "openai"
+License-File: LICENSE
+License-File: NOTICE
 
 # Databricks SDK for Python (Beta)
 
databricks/sdk/__init__.py:

@@ -1,3 +1,6 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+import logging
 from typing import Optional
 
 import databricks.sdk.core as client
@@ -5,7 +8,7 @@ import databricks.sdk.dbutils as dbutils
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
@@ -40,7 +43,8 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             PolicyFamiliesAPI)
 from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
-from databricks.sdk.service.iam import (AccountAccessControlAPI,
+from databricks.sdk.service.iam import (AccessControlAPI,
+                                        AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
                                         AccountGroupsAPI,
                                         AccountServicePrincipalsAPI,
@@ -56,9 +60,11 @@ from databricks.sdk.service.marketplace import (
     ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
     ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
 from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
-from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
+from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
+                                           CustomAppIntegrationAPI,
                                            OAuthPublishedAppsAPI,
                                            PublishedAppIntegrationAPI,
+                                           ServicePrincipalFederationPolicyAPI,
                                            ServicePrincipalSecretsAPI)
 from databricks.sdk.service.pipelines import PipelinesAPI
 from databricks.sdk.service.provisioning import (CredentialsAPI,
@@ -95,6 +101,8 @@ from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
 from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
                                               SecretsAPI, WorkspaceAPI)
 
+_LOG = logging.getLogger(__name__)
+
 
 def _make_dbutils(config: client.Config):
     # We try to directly check if we are in runtime, instead of
@@ -112,6 +120,14 @@ def _make_dbutils(config: client.Config):
     return runtime_dbutils
 
 
+def _make_files_client(apiClient: client.ApiClient, config: client.Config):
+    if config.enable_experimental_files_api_client:
+        _LOG.info("Experimental Files API client is enabled")
+        return FilesExt(apiClient, config)
+    else:
+        return FilesAPI(apiClient)
+
+
 class WorkspaceClient:
     """
     The WorkspaceClient is a client for the workspace-level Databricks REST API.
@@ -173,6 +189,7 @@ class WorkspaceClient:
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
+        self._access_control = AccessControlAPI(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
         self._alerts_legacy = AlertsLegacyAPI(self._api_client)
@@ -201,7 +218,7 @@ class WorkspaceClient:
         self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
         self._experiments = ExperimentsAPI(self._api_client)
         self._external_locations = ExternalLocationsAPI(self._api_client)
-        self._files = FilesAPI(self._api_client)
+        self._files = _make_files_client(self._api_client, self._config)
         self._functions = FunctionsAPI(self._api_client)
         self._genie = GenieAPI(self._api_client)
         self._git_credentials = GitCredentialsAPI(self._api_client)
@@ -281,6 +298,11 @@ class WorkspaceClient:
     def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
+    @property
+    def access_control(self) -> AccessControlAPI:
+        """Rule based Access Control for Databricks Resources."""
+        return self._access_control
+
     @property
     def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
@@ -826,6 +848,7 @@ class AccountClient:
         self._credentials = CredentialsAPI(self._api_client)
         self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
         self._encryption_keys = EncryptionKeysAPI(self._api_client)
+        self._federation_policy = AccountFederationPolicyAPI(self._api_client)
         self._groups = AccountGroupsAPI(self._api_client)
         self._ip_access_lists = AccountIpAccessListsAPI(self._api_client)
         self._log_delivery = LogDeliveryAPI(self._api_client)
@@ -836,6 +859,7 @@ class AccountClient:
         self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client)
         self._private_access = PrivateAccessAPI(self._api_client)
         self._published_app_integration = PublishedAppIntegrationAPI(self._api_client)
+        self._service_principal_federation_policy = ServicePrincipalFederationPolicyAPI(self._api_client)
         self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client)
         self._service_principals = AccountServicePrincipalsAPI(self._api_client)
         self._settings = AccountSettingsAPI(self._api_client)
@@ -881,6 +905,11 @@ class AccountClient:
         """These APIs manage encryption key configurations for this workspace (optional)."""
         return self._encryption_keys
 
+    @property
+    def federation_policy(self) -> AccountFederationPolicyAPI:
+        """These APIs manage account federation policies."""
+        return self._federation_policy
+
     @property
     def groups(self) -> AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
@@ -931,6 +960,11 @@ class AccountClient:
         """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
+    @property
+    def service_principal_federation_policy(self) -> ServicePrincipalFederationPolicyAPI:
+        """These APIs manage service principal federation policies."""
+        return self._service_principal_federation_policy
+
     @property
     def service_principal_secrets(self) -> ServicePrincipalSecretsAPI:
         """These APIs enable administrators to manage service principal secrets."""
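For orientation, a hedged sketch of reaching the newly wired account-level federation policy services; it assumes the generated list methods in the new oauth2 APIs, and the host, account id, and service principal id are placeholders:

from databricks.sdk import AccountClient

a = AccountClient(host='https://accounts.cloud.databricks.com',
                  account_id='00000000-0000-0000-0000-000000000000')

# New in 0.41.0: account and service-principal federation policies.
for policy in a.federation_policy.list():
    print(policy.name)
for policy in a.service_principal_federation_policy.list(service_principal_id=123):
    print(policy.name)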
databricks/sdk/_base_client.py:

@@ -1,6 +1,7 @@
 import io
 import logging
 import urllib.parse
+from abc import ABC, abstractmethod
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -285,8 +286,20 @@ class _BaseClient:
         logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
+class _RawResponse(ABC):
+
+    @abstractmethod
+    # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
+    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+
 class _StreamingResponse(BinaryIO):
-    _response: requests.Response
+    _response: _RawResponse
     _buffer: bytes
     _content: Union[Iterator[bytes], None]
     _chunk_size: Union[int, None]
@@ -298,7 +311,7 @@ class _StreamingResponse(BinaryIO):
     def flush(self) -> int:
         pass
 
-    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+    def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
         self._response = response
         self._buffer = b''
         self._content = None
@@ -308,7 +321,7 @@ class _StreamingResponse(BinaryIO):
         if self._closed:
             raise ValueError("I/O operation on closed file")
         if not self._content:
-            self._content = self._response.iter_content(chunk_size=self._chunk_size)
+            self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)
 
     def __enter__(self) -> BinaryIO:
         self._open()
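The new _RawResponse ABC narrows what _StreamingResponse needs from the transport to iter_content and close, which is what lets the files mixin below swap in a self-healing implementation. A minimal in-memory sketch of the contract; _FakeRawResponse is hypothetical, for illustration only:

from databricks.sdk._base_client import _RawResponse, _StreamingResponse


class _FakeRawResponse(_RawResponse):
    # Serves a fixed payload in chunks, mimicking requests.Response.iter_content.

    def __init__(self, payload: bytes):
        self._payload = payload

    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
        for i in range(0, len(self._payload), chunk_size):
            yield self._payload[i:i + chunk_size]

    def close(self):
        pass


with _StreamingResponse(_FakeRawResponse(b'hello world'), chunk_size=4) as f:
    assert f.read() == b'hello world'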
databricks/sdk/config.py:

@@ -92,6 +92,11 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
+    enable_experimental_files_api_client: bool = ConfigAttribute(
+        env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+    files_api_client_download_max_total_recovers = None
+    files_api_client_download_max_total_recovers_without_progressing = 1
+
     def __init__(
         self,
         *,
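A sketch of how a caller might opt in via the environment variable that the new ConfigAttribute reads; the volume path is a placeholder:

import os

from databricks.sdk import WorkspaceClient

os.environ['DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT'] = 'true'
w = WorkspaceClient()  # w.files now resolves to FilesExt via _make_files_client

resp = w.files.download('/Volumes/main/default/my_volume/big_file.bin')
with resp.contents as stream:
    # Bounded reads, as the FilesExt.download docstring recommends.
    while chunk := stream.read(1024 * 1024):
        pass  # process chunk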
databricks/sdk/credentials_provider.py:

@@ -167,6 +167,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
     oidc = cfg.oidc_endpoints
     if oidc is None:
         return None
+
     token_source = ClientCredentials(client_id=cfg.client_id,
                                      client_secret=cfg.client_secret,
                                      token_url=oidc.token_endpoint,
@@ -187,6 +188,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
@@ -194,12 +196,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     elif cfg.azure_client_id:
         client_id = cfg.azure_client_id
         client_secret = cfg.azure_client_secret
-
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist.
-    #
+    # Load cached credentials from disk if they exist. Note that these are
+    # local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -209,17 +210,25 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
                              redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
-        credentials.token()
-        return credentials(cfg)
-
-    oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                               client_id=client_id,
-                               redirect_url=redirect_url,
-                               client_secret=client_secret)
-    consent = oauth_client.initiate_consent()
-    if not consent:
-        return None
-    credentials = consent.launch_external_browser()
+        try:
+            # Pro-actively refresh the loaded credentials. This is done
+            # to detect if the token is expired and needs to be refreshed
+            # by going through the OAuth login flow.
+            credentials.token()
+            return credentials(cfg)
+        # TODO: We should ideally use more specific exceptions.
+        except Exception as e:
+            logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow')
+
+    oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                               client_id=client_id,
+                               redirect_url=redirect_url,
+                               client_secret=client_secret)
+    consent = oauth_client.initiate_consent()
+    if not consent:
+        return None
+
+    credentials = consent.launch_external_browser()
     token_cache.save(credentials)
     return credentials(cfg)
 
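From the caller's side the refresh-or-relogin logic above is transparent: the first run opens a browser for OAuth consent and caches the token on disk, while later runs reuse the cache and only fall back to a fresh consent flow if the proactive refresh fails. A sketch, with a placeholder host:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient(host='https://example.cloud.databricks.com',
                    auth_type='external-browser')
print(w.current_user.me().user_name)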
databricks/sdk/data_plane.py:

@@ -3,7 +3,6 @@ from dataclasses import dataclass
 from typing import Callable, List
 
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
 
 
 @dataclass
@@ -19,6 +18,7 @@ class DataPlaneDetails:
 
 class DataPlaneService:
     """Helper class to fetch and manage DataPlane details."""
+    from .service.serving import DataPlaneInfo
 
     def __init__(self):
         self._data_plane_info = {}
databricks/sdk/mixins/files.py:

@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import base64
+import logging
 import os
 import pathlib
 import platform
@@ -8,19 +9,27 @@ import shutil
 import sys
 from abc import ABC, abstractmethod
 from collections import deque
+from collections.abc import Iterator
 from io import BytesIO
 from types import TracebackType
 from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable,
-                    Type, Union)
+                    Optional, Type, Union)
 from urllib import parse
 
+from requests import RequestException
+
+from .._base_client import _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..errors import NotFound
 from ..service import files
+from ..service._internal import _escape_multi_segment_path_parameter
+from ..service.files import DownloadResponse
 
 if TYPE_CHECKING:
     from _typeshed import Self
 
+_LOG = logging.getLogger(__name__)
+
 
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
@@ -636,3 +645,177 @@ class DbfsExt(files.DbfsAPI):
         if p.is_dir and not recursive:
             raise IOError('deleting directories requires recursive flag')
         p.delete(recursive=recursive)
+
+
+class FilesExt(files.FilesAPI):
+    __doc__ = files.FilesAPI.__doc__
+
+    def __init__(self, api_client, config: Config):
+        super().__init__(api_client)
+        self._config = config.copy()
+
+    def download(self, file_path: str) -> DownloadResponse:
+        """Download a file.
+
+        Downloads a file of any size. The file contents are the response body.
+        This is a standard HTTP file download, not a JSON RPC.
+
+        It is strongly recommended, for fault tolerance reasons,
+        to iteratively consume from the stream with a maximum read(size)
+        defined instead of using indefinite-size reads.
+
+        :param file_path: str
+          The remote path of the file, e.g. /Volumes/path/to/your/file
+
+        :returns: :class:`DownloadResponse`
+        """
+
+        initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path,
+                                                                       start_byte_offset=0,
+                                                                       if_unmodified_since_timestamp=None)
+
+        wrapped_response = self._wrap_stream(file_path, initial_response)
+        initial_response.contents._response = wrapped_response
+        return initial_response
+
+    def _download_raw_stream(self,
+                             file_path: str,
+                             start_byte_offset: int,
+                             if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse:
+        headers = {'Accept': 'application/octet-stream', }
+
+        if start_byte_offset and not if_unmodified_since_timestamp:
+            raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified")
+
+        if start_byte_offset:
+            headers['Range'] = f'bytes={start_byte_offset}-'
+
+        if if_unmodified_since_timestamp:
+            headers['If-Unmodified-Since'] = if_unmodified_since_timestamp
+
+        response_headers = ['content-length', 'content-type', 'last-modified', ]
+        res = self._api.do('GET',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                           headers=headers,
+                           response_headers=response_headers,
+                           raw=True)
+
+        result = DownloadResponse.from_dict(res)
+        if not isinstance(result.contents, _StreamingResponse):
+            raise Exception("Internal error: response contents is of unexpected type: " +
+                            type(result.contents).__name__)
+
+        return result
+
+    def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
+        underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+        return _ResilientResponse(self,
+                                  file_path,
+                                  downloadResponse.last_modified,
+                                  offset=0,
+                                  underlying_response=underlying_response)
+
+
+class _ResilientResponse(_RawResponse):
+
+    def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int,
+                 underlying_response: _RawResponse):
+        self.api = api
+        self.file_path = file_path
+        self.underlying_response = underlying_response
+        self.offset = offset
+        self.file_last_modified = file_last_modified
+
+    def iter_content(self, chunk_size=1, decode_unicode=False):
+        if decode_unicode:
+            raise ValueError('Decode unicode is not supported')
+
+        iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
+        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
+                                           self.api, chunk_size)
+        return self.iterator
+
+    def close(self):
+        self.iterator.close()
+
+
+class _ResilientIterator(Iterator):
+    # This class tracks current offset (returned to the client code)
+    # and recovers from failures by requesting download from the current offset.
+
+    @staticmethod
+    def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse:
+        streaming_response: _StreamingResponse = download_response.contents  # this is an instance of _StreamingResponse
+        return streaming_response._response
+
+    def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int,
+                 api: FilesExt, chunk_size: int):
+        self._underlying_iterator = underlying_iterator
+        self._api = api
+        self._file_path = file_path
+
+        # Absolute current offset (0-based), i.e. number of bytes from the beginning of the file
+        # that were so far returned to the caller code.
+        self._offset = offset
+        self._file_last_modified = file_last_modified
+        self._chunk_size = chunk_size
+
+        self._total_recovers_count: int = 0
+        self._recovers_without_progressing_count: int = 0
+        self._closed: bool = False
+
+    def _should_recover(self) -> bool:
+        if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
+            _LOG.debug("Total recovers limit exceeded")
+            return False
+        if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing:
+            _LOG.debug("No progression recovers limit exceeded")
+            return False
+        return True
+
+    def _recover(self) -> bool:
+        if not self._should_recover():
+            return False  # recover suppressed, rethrow original exception
+
+        self._total_recovers_count += 1
+        self._recovers_without_progressing_count += 1
+
+        try:
+            self._underlying_iterator.close()
+
+            _LOG.debug("Trying to recover from offset " + str(self._offset))
+
+            # following call includes all the required network retries
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
+                                                              self._file_last_modified)
+            underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
                                                                          decode_unicode=False)
+            _LOG.debug("Recover succeeded")
+            return True
+        except:
+            return False  # recover failed, rethrow original exception
+
+    def __next__(self):
+        if self._closed:
+            # following _BaseClient
+            raise ValueError("I/O operation on closed file")
+
+        while True:
+            try:
+                returned_bytes = next(self._underlying_iterator)
+                self._offset += len(returned_bytes)
+                self._recovers_without_progressing_count = 0
+                return returned_bytes
+
+            except StopIteration:
+                raise
+
+            # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions
+            except RequestException:
+                if not self._recover():
+                    raise
+
+    def close(self):
+        self._underlying_iterator.close()
+        self._closed = True
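The _should_recover logic above is bounded by the two Config knobs introduced earlier; because FilesExt.__init__ snapshots the config with config.copy(), they must be set before the client is built. A hedged sketch with placeholder host/token and illustrative limits:

from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

cfg = Config(host='https://example.cloud.databricks.com',
             token='dapi-placeholder',
             enable_experimental_files_api_client=True)
cfg.files_api_client_download_max_total_recovers = 5  # None means no overall cap
cfg.files_api_client_download_max_total_recovers_without_progressing = 1  # give up on a repeat failure at the same offset

w = WorkspaceClient(config=cfg)  # downloads via w.files now recover mid-stream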
databricks/sdk/mixins/open_ai_client.py:

@@ -1,4 +1,9 @@
-from databricks.sdk.service.serving import ServingEndpointsAPI
+import json as js
+from typing import Dict, Optional
+
+from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            ExternalFunctionResponse,
+                                            ServingEndpointsAPI)
 
 
 class ServingEndpointsExt(ServingEndpointsAPI):
@@ -50,3 +55,37 @@ class ServingEndpointsExt(ServingEndpointsAPI):
             openai_api_base=self._api._cfg.host + "/serving-endpoints",
             api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
             http_client=self._get_authorized_http_client())
+
+    def http_request(self,
+                     conn: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[Dict[str, str]] = None,
+                     json: Optional[Dict[str, str]] = None,
+                     params: Optional[Dict[str, str]] = None) -> ExternalFunctionResponse:
+        """Make external services call using the credentials stored in UC Connection.
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+        :returns: :class:`ExternalFunctionResponse`
+        """
+
+        return super().http_request(connection_name=conn,
+                                    method=method,
+                                    path=path,
+                                    headers=js.dumps(headers),
+                                    json=js.dumps(json),
+                                    params=js.dumps(params),
+                                    )
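A usage sketch for the new mixin method, assuming a Unity Catalog connection named my_connection already exists; the connection name, path, and query parameters are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()
# Auth headers come from the credentials stored in the UC connection.
resp = w.serving_endpoints.http_request(conn='my_connection',
                                        method=ExternalFunctionRequestHttpMethod.GET,
                                        path='/api/v1/status',
                                        params={'verbose': 'true'})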
databricks/sdk/service/apps.py:

@@ -967,25 +967,33 @@ class AppsAPI:
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self, *, app: Optional[App] = None) -> Wait[App]:
+    def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.
 
         Creates a new app.
 
         :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
 
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
         body = app.as_dict()
+        query = {}
+        if no_compute is not None: query['no_compute'] = no_compute
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
-    def create_and_wait(self, *, app: Optional[App] = None, timeout=timedelta(minutes=20)) -> App:
-        return self.create(app=app).result(timeout=timeout)
+    def create_and_wait(self,
+                        *,
+                        app: Optional[App] = None,
+                        no_compute: Optional[bool] = None,
+                        timeout=timedelta(minutes=20)) -> App:
+        return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
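A sketch of the new no_compute flag: with it set, create registers the app but does not start its compute, so a caller would typically read the immediate response instead of waiting for ACTIVE (the app name is a placeholder):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()
waiter = w.apps.create(app=App(name='my-app'), no_compute=True)
created = waiter.response  # the App record, without blocking on wait_get_app_active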
databricks/sdk/service/catalog.py:

@@ -3704,8 +3704,8 @@ class GenerateTemporaryTableCredentialResponse:
 class GetBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
-    SERVICE_CREDENTIAL = 'service_credential'
     STORAGE_CREDENTIAL = 'storage_credential'
 
 
@@ -5810,6 +5810,7 @@ class ProvisioningInfo:
 class ProvisioningInfoState(Enum):
 
     ACTIVE = 'ACTIVE'
+    DEGRADED = 'DEGRADED'
     DELETING = 'DELETING'
     FAILED = 'FAILED'
     PROVISIONING = 'PROVISIONING'
@@ -7067,8 +7068,8 @@ class UpdateAssignmentResponse:
 class UpdateBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
-    SERVICE_CREDENTIAL = 'service_credential'
     STORAGE_CREDENTIAL = 'storage_credential'
 
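With SERVICE_CREDENTIAL replaced by the broader CREDENTIAL securable type, enumerating bindings would now look like the sketch below; it assumes the existing workspace_bindings.get_bindings API, and the securable name is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import GetBindingsSecurableType

w = WorkspaceClient()
for binding in w.workspace_bindings.get_bindings(
        securable_type=GetBindingsSecurableType.CREDENTIAL,
        securable_name='my_credential'):
    print(binding.workspace_id)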
databricks/sdk/service/cleanrooms.py:

@@ -312,6 +312,7 @@ class CleanRoomAssetNotebook:
 class CleanRoomAssetStatusEnum(Enum):
 
     ACTIVE = 'ACTIVE'
+    PENDING = 'PENDING'
     PERMISSION_DENIED = 'PERMISSION_DENIED'
 
 
@@ -443,7 +444,7 @@ class CleanRoomAssetVolumeLocalDetails:
 class CleanRoomCollaborator:
     """Publicly visible clean room collaborator."""
 
-    collaborator_alias: Optional[str] = None
+    collaborator_alias: str
     """Collaborator alias specified by the clean room creator. It is unique across all collaborators of
     this clean room, and used to derive multiple values internally such as catalog alias and clean
     room name for single metastore clean rooms. It should follow [UC securable naming requirements].