databricks-sdk 0.40.0__tar.gz → 0.41.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk has been flagged as possibly problematic by the registry.
- {databricks_sdk-0.40.0/databricks_sdk.egg-info → databricks-sdk-0.41.0}/PKG-INFO +3 -28
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/__init__.py +23 -3
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/_base_client.py +16 -3
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/config.py +5 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/credentials_provider.py +23 -14
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/data_plane.py +1 -1
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/files.py +184 -1
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/open_ai_client.py +40 -1
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/apps.py +12 -4
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/catalog.py +1 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/cleanrooms.py +2 -1
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/compute.py +4 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/dashboards.py +7 -4
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/files.py +6 -3
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/iam.py +158 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/jobs.py +168 -16
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/oauth2.py +53 -45
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/pipelines.py +89 -12
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/serving.py +423 -215
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/sharing.py +51 -54
- databricks-sdk-0.41.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0/databricks_sdk.egg-info}/PKG-INFO +3 -28
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/SOURCES.txt +2 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_base_client.py +5 -5
- databricks-sdk-0.41.0/tests/test_credentials_provider.py +145 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_data_plane.py +1 -1
- databricks-sdk-0.41.0/tests/test_files.py +340 -0
- databricks_sdk-0.40.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/LICENSE +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/NOTICE +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/README.md +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/billing.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/ml.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/settings.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/sql.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/requires.txt +14 -14
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/setup.cfg +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/setup.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_auth.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_auth_manual_tests.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_client.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_compute_mixins.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_config.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_core.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_dbfs_mixins.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_dbutils.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_environments.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_errors.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_init_file.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_internal.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_jobs.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_jobs_mixin.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_metadata_service_auth.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_misc.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_model_serving_auth.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_oauth.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_open_ai_mixin.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_retries.py +0 -0
- {databricks_sdk-0.40.0 → databricks-sdk-0.41.0}/tests/test_user_agent.py +0 -0
```diff
--- databricks_sdk-0.40.0/databricks_sdk.egg-info/PKG-INFO
+++ databricks-sdk-0.41.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.40.0
+Version: 0.41.0
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin
@@ -20,36 +20,11 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
-License-File: LICENSE
-License-File: NOTICE
-Requires-Dist: requests<3,>=2.28.1
-Requires-Dist: google-auth~=2.0
 Provides-Extra: dev
-Requires-Dist: pytest; extra == "dev"
-Requires-Dist: pytest-cov; extra == "dev"
-Requires-Dist: pytest-xdist; extra == "dev"
-Requires-Dist: pytest-mock; extra == "dev"
-Requires-Dist: yapf; extra == "dev"
-Requires-Dist: pycodestyle; extra == "dev"
-Requires-Dist: autoflake; extra == "dev"
-Requires-Dist: isort; extra == "dev"
-Requires-Dist: wheel; extra == "dev"
-Requires-Dist: ipython; extra == "dev"
-Requires-Dist: ipywidgets; extra == "dev"
-Requires-Dist: requests-mock; extra == "dev"
-Requires-Dist: pyfakefs; extra == "dev"
-Requires-Dist: databricks-connect; extra == "dev"
-Requires-Dist: pytest-rerunfailures; extra == "dev"
-Requires-Dist: openai; extra == "dev"
-Requires-Dist: langchain-openai; python_version > "3.7" and extra == "dev"
-Requires-Dist: httpx; extra == "dev"
 Provides-Extra: notebook
-Requires-Dist: ipython<9,>=8; extra == "notebook"
-Requires-Dist: ipywidgets<9,>=8; extra == "notebook"
 Provides-Extra: openai
-Requires-Dist: openai; extra == "openai"
-Requires-Dist: langchain-openai; python_version > "3.7" and extra == "openai"
-Requires-Dist: httpx; extra == "openai"
+License-File: LICENSE
+License-File: NOTICE
 
 # Databricks SDK for Python (Beta)
 
```
```diff
--- databricks_sdk-0.40.0/databricks/sdk/__init__.py
+++ databricks-sdk-0.41.0/databricks/sdk/__init__.py
@@ -1,3 +1,6 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+import logging
 from typing import Optional
 
 import databricks.sdk.core as client
@@ -5,7 +8,7 @@ import databricks.sdk.dbutils as dbutils
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
@@ -40,7 +43,8 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             PolicyFamiliesAPI)
 from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
-from databricks.sdk.service.iam import (AccountAccessControlAPI,
+from databricks.sdk.service.iam import (AccessControlAPI,
+                                        AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
                                         AccountGroupsAPI,
                                         AccountServicePrincipalsAPI,
@@ -97,6 +101,8 @@ from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
 from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
                                               SecretsAPI, WorkspaceAPI)
 
+_LOG = logging.getLogger(__name__)
+
 
 def _make_dbutils(config: client.Config):
     # We try to directly check if we are in runtime, instead of
@@ -114,6 +120,14 @@ def _make_dbutils(config: client.Config):
     return runtime_dbutils
 
 
+def _make_files_client(apiClient: client.ApiClient, config: client.Config):
+    if config.enable_experimental_files_api_client:
+        _LOG.info("Experimental Files API client is enabled")
+        return FilesExt(apiClient, config)
+    else:
+        return FilesAPI(apiClient)
+
+
 class WorkspaceClient:
     """
     The WorkspaceClient is a client for the workspace-level Databricks REST API.
@@ -175,6 +189,7 @@ class WorkspaceClient:
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
+        self._access_control = AccessControlAPI(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
         self._alerts_legacy = AlertsLegacyAPI(self._api_client)
@@ -203,7 +218,7 @@ class WorkspaceClient:
         self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
         self._experiments = ExperimentsAPI(self._api_client)
         self._external_locations = ExternalLocationsAPI(self._api_client)
-        self._files = FilesAPI(self._api_client)
+        self._files = _make_files_client(self._api_client, self._config)
         self._functions = FunctionsAPI(self._api_client)
         self._genie = GenieAPI(self._api_client)
         self._git_credentials = GitCredentialsAPI(self._api_client)
@@ -283,6 +298,11 @@ class WorkspaceClient:
     def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
+    @property
+    def access_control(self) -> AccessControlAPI:
+        """Rule based Access Control for Databricks Resources."""
+        return self._access_control
+
     @property
     def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
```
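Two behavioral changes are visible in this file: `WorkspaceClient` gains an `access_control` property, and `w.files` is now constructed through `_make_files_client`, so its concrete type depends on configuration. A minimal sketch of opting in; the host and token below are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

# Config accepts any of its declared attributes as keyword arguments.
cfg = Config(host="https://example.cloud.databricks.com",  # placeholder
             token="dapi-...",                             # placeholder
             enable_experimental_files_api_client=True)
w = WorkspaceClient(config=cfg)

print(type(w.files).__name__)  # 'FilesExt' with the flag on, 'FilesAPI' otherwise
rules = w.access_control       # new in 0.41.0: rule-based access control API
```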
```diff
--- databricks_sdk-0.40.0/databricks/sdk/_base_client.py
+++ databricks-sdk-0.41.0/databricks/sdk/_base_client.py
@@ -1,6 +1,7 @@
 import io
 import logging
 import urllib.parse
+from abc import ABC, abstractmethod
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -285,8 +286,20 @@ class _BaseClient:
         logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
+class _RawResponse(ABC):
+
+    @abstractmethod
+    # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
+    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+
 class _StreamingResponse(BinaryIO):
-    _response: requests.Response
+    _response: _RawResponse
     _buffer: bytes
     _content: Union[Iterator[bytes], None]
     _chunk_size: Union[int, None]
@@ -298,7 +311,7 @@ class _StreamingResponse(BinaryIO):
     def flush(self) -> int:
         pass
 
-    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+    def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
         self._response = response
         self._buffer = b''
         self._content = None
@@ -308,7 +321,7 @@ class _StreamingResponse(BinaryIO):
         if self._closed:
             raise ValueError("I/O operation on closed file")
         if not self._content:
-            self._content = self._response.iter_content(chunk_size=self._chunk_size)
+            self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)
 
     def __enter__(self) -> BinaryIO:
         self._open()
```
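The new `_RawResponse` ABC decouples `_StreamingResponse` from `requests.Response`: anything that can yield chunks and close can back the stream, which is what the download-recovery wrapper in `mixins/files.py` exploits. A hypothetical in-memory implementation, for illustration only (these are private SDK classes and may change):

```python
from databricks.sdk._base_client import _RawResponse, _StreamingResponse


class _FakeRawResponse(_RawResponse):
    """Hypothetical in-memory _RawResponse, e.g. for tests."""

    def __init__(self, payload: bytes):
        self._payload = payload

    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
        # Yield the payload in chunk_size pieces, mirroring requests.Response.iter_content.
        for i in range(0, len(self._payload), chunk_size):
            yield self._payload[i:i + chunk_size]

    def close(self):
        pass


stream = _StreamingResponse(_FakeRawResponse(b"hello world"), chunk_size=4)
print(stream.read())  # b'hello world'
```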
```diff
--- databricks_sdk-0.40.0/databricks/sdk/config.py
+++ databricks-sdk-0.41.0/databricks/sdk/config.py
@@ -92,6 +92,11 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
+    enable_experimental_files_api_client: bool = ConfigAttribute(
+        env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+    files_api_client_download_max_total_recovers = None
+    files_api_client_download_max_total_recovers_without_progressing = 1
+
     def __init__(
         self,
         *,
```
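Because the new flag is declared with an `env=` mapping, it can be toggled without code changes; the two recover limits are plain attributes with the defaults shown (`None` meaning no cap on total recovers, and at most one consecutive recover that makes no progress). A sketch, with placeholder host and token:

```python
import os

from databricks.sdk.core import Config

# The flag can come from the environment, per the ConfigAttribute declaration above.
os.environ["DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT"] = "True"

cfg = Config(host="https://example.cloud.databricks.com",  # placeholder
             token="dapi-...")                             # placeholder
assert cfg.enable_experimental_files_api_client

# Tune the download-recovery limits on the instance if needed.
cfg.files_api_client_download_max_total_recovers = 10
cfg.files_api_client_download_max_total_recovers_without_progressing = 1
```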
```diff
--- databricks_sdk-0.40.0/databricks/sdk/credentials_provider.py
+++ databricks-sdk-0.41.0/databricks/sdk/credentials_provider.py
@@ -167,6 +167,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
     oidc = cfg.oidc_endpoints
     if oidc is None:
         return None
+
     token_source = ClientCredentials(client_id=cfg.client_id,
                                      client_secret=cfg.client_secret,
                                      token_url=oidc.token_endpoint,
@@ -187,6 +188,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
@@ -194,12 +196,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     elif cfg.azure_client_id:
         client_id = cfg.azure_client
         client_secret = cfg.azure_client_secret
-
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist.
-    #
+    # Load cached credentials from disk if they exist. Note that these are
+    # local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -209,17 +210,25 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
                              redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
-        # Force a refresh in case the loaded credentials are expired.
-        credentials.token()
-    else:
-        oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                                   client_id=client_id,
-                                   redirect_url=redirect_url,
-                                   client_secret=client_secret)
-        consent = oauth_client.initiate_consent()
-        if not consent:
-            return None
-        credentials = consent.launch_external_browser()
+        try:
+            # Pro-actively refresh the loaded credentials. This is done
+            # to detect if the token is expired and needs to be refreshed
+            # by going through the OAuth login flow.
+            credentials.token()
+            return credentials(cfg)
+        # TODO: We should ideally use more specific exceptions.
+        except Exception as e:
+            logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow')
+
+    oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                               client_id=client_id,
+                               redirect_url=redirect_url,
+                               client_secret=client_secret)
+    consent = oauth_client.initiate_consent()
+    if not consent:
+        return None
+
+    credentials = consent.launch_external_browser()
     token_cache.save(credentials)
     return credentials(cfg)
```
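The practical effect: a stale token cache no longer surfaces as an error. The SDK first tries to refresh the cached token and only falls back to a fresh browser login if the refresh fails. Hypothetical usage, with a placeholder host:

```python
from databricks.sdk import WorkspaceClient

# First run: opens a browser for OAuth consent, then caches the token.
# Later runs: refreshes the cached token; if the refresh fails, a new
# browser login is started instead of raising.
w = WorkspaceClient(host="https://example.cloud.databricks.com",  # placeholder
                    auth_type="external-browser")
print(w.current_user.me().user_name)
```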
```diff
--- databricks_sdk-0.40.0/databricks/sdk/data_plane.py
+++ databricks-sdk-0.41.0/databricks/sdk/data_plane.py
@@ -3,7 +3,6 @@ from dataclasses import dataclass
 from typing import Callable, List
 
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
 
 
 @dataclass
@@ -19,6 +18,7 @@ class DataPlaneDetails:
 
 class DataPlaneService:
     """Helper class to fetch and manage DataPlane details."""
+    from .service.serving import DataPlaneInfo
 
     def __init__(self):
         self._data_plane_info = {}
```
```diff
--- databricks_sdk-0.40.0/databricks/sdk/mixins/files.py
+++ databricks-sdk-0.41.0/databricks/sdk/mixins/files.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import base64
+import logging
 import os
 import pathlib
 import platform
@@ -8,19 +9,27 @@ import shutil
 import sys
 from abc import ABC, abstractmethod
 from collections import deque
+from collections.abc import Iterator
 from io import BytesIO
 from types import TracebackType
 from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable,
-                    Iterator, Optional, Type, Union)
+                    Optional, Type, Union)
 from urllib import parse
 
+from requests import RequestException
+
+from .._base_client import _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..errors import NotFound
 from ..service import files
+from ..service._internal import _escape_multi_segment_path_parameter
+from ..service.files import DownloadResponse
 
 if TYPE_CHECKING:
     from _typeshed import Self
 
+_LOG = logging.getLogger(__name__)
+
 
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
@@ -636,3 +645,177 @@ class DbfsExt(files.DbfsAPI):
         if p.is_dir and not recursive:
             raise IOError('deleting directories requires recursive flag')
         p.delete(recursive=recursive)
+
+
+class FilesExt(files.FilesAPI):
+    __doc__ = files.FilesAPI.__doc__
+
+    def __init__(self, api_client, config: Config):
+        super().__init__(api_client)
+        self._config = config.copy()
+
+    def download(self, file_path: str) -> DownloadResponse:
+        """Download a file.
+
+        Downloads a file of any size. The file contents are the response body.
+        This is a standard HTTP file download, not a JSON RPC.
+
+        It is strongly recommended, for fault tolerance reasons,
+        to iteratively consume from the stream with a maximum read(size)
+        defined instead of using indefinite-size reads.
+
+        :param file_path: str
+          The remote path of the file, e.g. /Volumes/path/to/your/file
+
+        :returns: :class:`DownloadResponse`
+        """
+
+        initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path,
+                                                                       start_byte_offset=0,
+                                                                       if_unmodified_since_timestamp=None)
+
+        wrapped_response = self._wrap_stream(file_path, initial_response)
+        initial_response.contents._response = wrapped_response
+        return initial_response
+
+    def _download_raw_stream(self,
+                             file_path: str,
+                             start_byte_offset: int,
+                             if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse:
+        headers = {'Accept': 'application/octet-stream', }
+
+        if start_byte_offset and not if_unmodified_since_timestamp:
+            raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified")
+
+        if start_byte_offset:
+            headers['Range'] = f'bytes={start_byte_offset}-'
+
+        if if_unmodified_since_timestamp:
+            headers['If-Unmodified-Since'] = if_unmodified_since_timestamp
+
+        response_headers = ['content-length', 'content-type', 'last-modified', ]
+        res = self._api.do('GET',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                           headers=headers,
+                           response_headers=response_headers,
+                           raw=True)
+
+        result = DownloadResponse.from_dict(res)
+        if not isinstance(result.contents, _StreamingResponse):
+            raise Exception("Internal error: response contents is of unexpected type: " +
+                            type(result.contents).__name__)
+
+        return result
+
+    def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
+        underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+        return _ResilientResponse(self,
+                                  file_path,
+                                  downloadResponse.last_modified,
+                                  offset=0,
+                                  underlying_response=underlying_response)
+
+
+class _ResilientResponse(_RawResponse):
+
+    def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int,
+                 underlying_response: _RawResponse):
+        self.api = api
+        self.file_path = file_path
+        self.underlying_response = underlying_response
+        self.offset = offset
+        self.file_last_modified = file_last_modified
+
+    def iter_content(self, chunk_size=1, decode_unicode=False):
+        if decode_unicode:
+            raise ValueError('Decode unicode is not supported')
+
+        iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
+        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
+                                           self.api, chunk_size)
+        return self.iterator
+
+    def close(self):
+        self.iterator.close()
+
+
+class _ResilientIterator(Iterator):
+    # This class tracks current offset (returned to the client code)
+    # and recovers from failures by requesting download from the current offset.
+
+    @staticmethod
+    def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse:
+        streaming_response: _StreamingResponse = download_response.contents  # this is an instance of _StreamingResponse
+        return streaming_response._response
+
+    def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int,
+                 api: FilesExt, chunk_size: int):
+        self._underlying_iterator = underlying_iterator
+        self._api = api
+        self._file_path = file_path
+
+        # Absolute current offset (0-based), i.e. number of bytes from the beginning of the file
+        # that were so far returned to the caller code.
+        self._offset = offset
+        self._file_last_modified = file_last_modified
+        self._chunk_size = chunk_size
+
+        self._total_recovers_count: int = 0
+        self._recovers_without_progressing_count: int = 0
+        self._closed: bool = False
+
+    def _should_recover(self) -> bool:
+        if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
+            _LOG.debug("Total recovers limit exceeded")
+            return False
+        if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing:
+            _LOG.debug("No progression recovers limit exceeded")
+            return False
+        return True
+
+    def _recover(self) -> bool:
+        if not self._should_recover():
+            return False  # recover suppressed, rethrow original exception
+
+        self._total_recovers_count += 1
+        self._recovers_without_progressing_count += 1
+
+        try:
+            self._underlying_iterator.close()
+
+            _LOG.debug("Trying to recover from offset " + str(self._offset))
+
+            # following call includes all the required network retries
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
+                                                              self._file_last_modified)
+            underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
+                                                                         decode_unicode=False)
+            _LOG.debug("Recover succeeded")
+            return True
+        except:
+            return False  # recover failed, rethrow original exception
+
+    def __next__(self):
+        if self._closed:
+            # following _BaseClient
+            raise ValueError("I/O operation on closed file")
+
+        while True:
+            try:
+                returned_bytes = next(self._underlying_iterator)
+                self._offset += len(returned_bytes)
+                self._recovers_without_progressing_count = 0
+                return returned_bytes
+
+            except StopIteration:
+                raise
+
+            # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions
+            except RequestException:
+                if not self._recover():
+                    raise
+
+    def close(self):
+        self._underlying_iterator.close()
+        self._closed = True
```
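Putting the pieces together, a download through `FilesExt` transparently reconnects from the current offset when the connection drops mid-stream. A sketch of consuming it the way the `download` docstring recommends; host, token and the volume path are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

cfg = Config(host="https://example.cloud.databricks.com",  # placeholder
             token="dapi-...",                             # placeholder
             enable_experimental_files_api_client=True)
w = WorkspaceClient(config=cfg)

# Bounded read(size) calls let the resilient iterator resume from its
# current offset after a transient RequestException instead of failing
# the whole download.
resp = w.files.download("/Volumes/main/default/my_volume/large_file.bin")
with resp.contents as stream:
    while True:
        chunk = stream.read(1024 * 1024)
        if not chunk:
            break
        # process chunk here
```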
```diff
--- databricks_sdk-0.40.0/databricks/sdk/mixins/open_ai_client.py
+++ databricks-sdk-0.41.0/databricks/sdk/mixins/open_ai_client.py
@@ -1,4 +1,9 @@
-from databricks.sdk.service.serving import ServingEndpointsAPI
+import json as js
+from typing import Dict, Optional
+
+from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            ExternalFunctionResponse,
+                                            ServingEndpointsAPI)
 
 
 class ServingEndpointsExt(ServingEndpointsAPI):
@@ -50,3 +55,37 @@ class ServingEndpointsExt(ServingEndpointsAPI):
             openai_api_base=self._api._cfg.host + "/serving-endpoints",
             api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
             http_client=self._get_authorized_http_client())
+
+    def http_request(self,
+                     conn: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[Dict[str, str]] = None,
+                     json: Optional[Dict[str, str]] = None,
+                     params: Optional[Dict[str, str]] = None) -> ExternalFunctionResponse:
+        """Make external services call using the credentials stored in UC Connection.
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+        :returns: :class:`ExternalFunctionResponse`
+        """
+
+        return super().http_request(connection_name=conn,
+                                    method=method,
+                                    path=path,
+                                    headers=js.dumps(headers),
+                                    json=js.dumps(json),
+                                    params=js.dumps(params),
+                                    )
```
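A usage sketch of the new wrapper, built directly from the signature above; the connection name, path and payload are placeholders, and the UC Connection must already exist and hold the external service's credentials:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()

resp = w.serving_endpoints.http_request(conn="my_connection",  # placeholder
                                        method=ExternalFunctionRequestHttpMethod.POST,
                                        path="/api/v1/resource",  # placeholder
                                        headers={"x-extra": "value"},
                                        json={"key": "value"},
                                        params={"limit": "10"})
```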
```diff
--- databricks_sdk-0.40.0/databricks/sdk/service/apps.py
+++ databricks-sdk-0.41.0/databricks/sdk/service/apps.py
@@ -967,25 +967,33 @@ class AppsAPI:
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self, *, app: Optional[App] = None) -> Wait[App]:
+    def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
        """Create an app.
 
         Creates a new app.
 
         :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
 
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
         body = app.as_dict()
+        query = {}
+        if no_compute is not None: query['no_compute'] = no_compute
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
-    def create_and_wait(self, *, app: Optional[App] = None, timeout=timedelta(minutes=20)) -> App:
-        return self.create(app=app).result(timeout=timeout)
+    def create_and_wait(self,
+                        *,
+                        app: Optional[App] = None,
+                        no_compute: Optional[bool] = None,
+                        timeout=timedelta(minutes=20)) -> App:
+        return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
```
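A short sketch of the new parameter; the app name is a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()

# With no_compute=True the app record is created but its compute is not
# started, so there is no need to block on the ACTIVE waiter.
waiter = w.apps.create(app=App(name="my-app"), no_compute=True)  # placeholder name
created = waiter.response  # the App returned by the initial POST, without waiting
```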
```diff
--- databricks_sdk-0.40.0/databricks/sdk/service/cleanrooms.py
+++ databricks-sdk-0.41.0/databricks/sdk/service/cleanrooms.py
@@ -312,6 +312,7 @@ class CleanRoomAssetNotebook:
 class CleanRoomAssetStatusEnum(Enum):
 
     ACTIVE = 'ACTIVE'
+    PENDING = 'PENDING'
     PERMISSION_DENIED = 'PERMISSION_DENIED'
 
 
@@ -443,7 +444,7 @@ class CleanRoomAssetVolumeLocalDetails:
 class CleanRoomCollaborator:
     """Publicly visible clean room collaborator."""
 
-    collaborator_alias: Optional[str] = None
+    collaborator_alias: str
     """Collaborator alias specified by the clean room creator. It is unique across all collaborators of
     this clean room, and used to derive multiple values internally such as catalog alias and clean
     room name for single metastore clean rooms. It should follow [UC securable naming requirements].
```
```diff
--- databricks_sdk-0.40.0/databricks/sdk/service/compute.py
+++ databricks-sdk-0.41.0/databricks/sdk/service/compute.py
@@ -4184,6 +4184,10 @@ class EventDetailsCause(Enum):
 
 class EventType(Enum):
 
+    ADD_NODES_FAILED = 'ADD_NODES_FAILED'
+    AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE'
+    AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF'
+    AUTOSCALING_FAILED = 'AUTOSCALING_FAILED'
     AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT'
     CREATING = 'CREATING'
     DBFS_DOWN = 'DBFS_DOWN'
```
```diff
--- databricks_sdk-0.40.0/databricks/sdk/service/dashboards.py
+++ databricks-sdk-0.41.0/databricks/sdk/service/dashboards.py
@@ -381,8 +381,9 @@ class GenieMessage:
     status: Optional[MessageStatus] = None
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
-    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing
-    AI provided SQL query. Get the SQL query result by calling
+    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`:
+    Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+    AI provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -678,8 +679,9 @@ class MessageErrorType(Enum):
 class MessageStatus(Enum):
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
-    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing
-    AI provided SQL query. Get the SQL query result by calling
+    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`:
+    Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+    AI provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -696,6 +698,7 @@ class MessageStatus(Enum):
     FAILED = 'FAILED'
     FETCHING_METADATA = 'FETCHING_METADATA'
     FILTERING_CONTEXT = 'FILTERING_CONTEXT'
+    PENDING_WAREHOUSE = 'PENDING_WAREHOUSE'
     QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
     SUBMITTED = 'SUBMITTED'
 
```
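A sketch of polling for the new status, assuming the snake_case methods `get_message` and `get_message_query_result` that correspond to the `:method:genie/getMessageQueryResult` reference above; all IDs are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import MessageStatus

w = WorkspaceClient()
space_id, conversation_id, message_id = "...", "...", "..."  # placeholders

msg = w.genie.get_message(space_id, conversation_id, message_id)
if msg.status == MessageStatus.PENDING_WAREHOUSE:
    pass  # warehouse still starting; poll again later
elif msg.status == MessageStatus.EXECUTING_QUERY:
    # Per the docstring, the message stays in EXECUTING_QUERY until the
    # client explicitly fetches the result.
    result = w.genie.get_message_query_result(space_id, conversation_id, message_id)
```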
```diff
--- databricks_sdk-0.40.0/databricks/sdk/service/files.py
+++ databricks-sdk-0.41.0/databricks/sdk/service/files.py
@@ -925,9 +925,12 @@ class FilesAPI:
     /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
 
     The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD,
-    PUT, and DELETE to manage files and directories specified using their URI path. The path is always
-    absolute.
+    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+
+    Some Files API client features are currently experimental. To enable them, set
+    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
+    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
 
     [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html"""
 
```