databricks-sdk 0.41.0__py3-none-any.whl → 0.43.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk has been flagged as potentially problematic; review the release details before upgrading.
- databricks/sdk/__init__.py +33 -5
- databricks/sdk/_base_client.py +20 -21
- databricks/sdk/credentials_provider.py +12 -6
- databricks/sdk/mixins/open_ai_client.py +25 -10
- databricks/sdk/retries.py +5 -1
- databricks/sdk/service/billing.py +348 -0
- databricks/sdk/service/catalog.py +33 -63
- databricks/sdk/service/cleanrooms.py +74 -3
- databricks/sdk/service/compute.py +36 -0
- databricks/sdk/service/dashboards.py +415 -0
- databricks/sdk/service/jobs.py +92 -1
- databricks/sdk/service/oauth2.py +41 -5
- databricks/sdk/service/serving.py +34 -40
- databricks/sdk/service/settings.py +454 -78
- databricks/sdk/service/sql.py +145 -18
- databricks/sdk/useragent.py +54 -0
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.41.0.dist-info → databricks_sdk-0.43.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.41.0.dist-info → databricks_sdk-0.43.0.dist-info}/RECORD +23 -23
- {databricks_sdk-0.41.0.dist-info → databricks_sdk-0.43.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.41.0.dist-info → databricks_sdk-0.43.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.41.0.dist-info → databricks_sdk-0.43.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.41.0.dist-info → databricks_sdk-0.43.0.dist-info}/top_level.txt +0 -0
databricks/sdk/__init__.py
CHANGED
|
@@ -13,8 +13,9 @@ from databricks.sdk.mixins.jobs import JobsExt
|
|
|
13
13
|
from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
|
|
14
14
|
from databricks.sdk.mixins.workspace import WorkspaceExt
|
|
15
15
|
from databricks.sdk.service.apps import AppsAPI
|
|
16
|
-
from databricks.sdk.service.billing import (BillableUsageAPI,
|
|
17
|
-
LogDeliveryAPI,
|
|
16
|
+
from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
|
|
17
|
+
BudgetsAPI, LogDeliveryAPI,
|
|
18
|
+
UsageDashboardsAPI)
|
|
18
19
|
from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
|
|
19
20
|
AccountMetastoresAPI,
|
|
20
21
|
AccountStorageCredentialsAPI,
|
|
@@ -41,7 +42,9 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
|
|
|
41
42
|
InstanceProfilesAPI, LibrariesAPI,
|
|
42
43
|
PolicyComplianceForClustersAPI,
|
|
43
44
|
PolicyFamiliesAPI)
|
|
44
|
-
from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
|
|
45
|
+
from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
|
|
46
|
+
LakeviewEmbeddedAPI,
|
|
47
|
+
QueryExecutionAPI)
|
|
45
48
|
from databricks.sdk.service.files import DbfsAPI, FilesAPI
|
|
46
49
|
from databricks.sdk.service.iam import (AccessControlAPI,
|
|
47
50
|
AccountAccessControlAPI,
|
|
@@ -80,7 +83,7 @@ from databricks.sdk.service.settings import (
|
|
|
80
83
|
AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
|
|
81
84
|
ComplianceSecurityProfileAPI, CredentialsManagerAPI,
|
|
82
85
|
CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
|
|
83
|
-
DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI,
|
|
86
|
+
DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableIpAccessListsAPI,
|
|
84
87
|
EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
|
|
85
88
|
NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
|
|
86
89
|
RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
|
|
@@ -95,7 +98,8 @@ from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
|
|
|
95
98
|
QueryHistoryAPI,
|
|
96
99
|
QueryVisualizationsAPI,
|
|
97
100
|
QueryVisualizationsLegacyAPI,
|
|
98
|
-
StatementExecutionAPI,
|
|
101
|
+
RedashConfigAPI, StatementExecutionAPI,
|
|
102
|
+
WarehousesAPI)
|
|
99
103
|
from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
|
|
100
104
|
VectorSearchIndexesAPI)
|
|
101
105
|
from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
|
|
@@ -230,6 +234,7 @@ class WorkspaceClient:
|
|
|
230
234
|
self._ip_access_lists = IpAccessListsAPI(self._api_client)
|
|
231
235
|
self._jobs = JobsExt(self._api_client)
|
|
232
236
|
self._lakeview = LakeviewAPI(self._api_client)
|
|
237
|
+
self._lakeview_embedded = LakeviewEmbeddedAPI(self._api_client)
|
|
233
238
|
self._libraries = LibrariesAPI(self._api_client)
|
|
234
239
|
self._metastores = MetastoresAPI(self._api_client)
|
|
235
240
|
self._model_registry = ModelRegistryAPI(self._api_client)
|
|
@@ -254,11 +259,13 @@ class WorkspaceClient:
|
|
|
254
259
|
self._quality_monitors = QualityMonitorsAPI(self._api_client)
|
|
255
260
|
self._queries = QueriesAPI(self._api_client)
|
|
256
261
|
self._queries_legacy = QueriesLegacyAPI(self._api_client)
|
|
262
|
+
self._query_execution = QueryExecutionAPI(self._api_client)
|
|
257
263
|
self._query_history = QueryHistoryAPI(self._api_client)
|
|
258
264
|
self._query_visualizations = QueryVisualizationsAPI(self._api_client)
|
|
259
265
|
self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
|
|
260
266
|
self._recipient_activation = RecipientActivationAPI(self._api_client)
|
|
261
267
|
self._recipients = RecipientsAPI(self._api_client)
|
|
268
|
+
self._redash_config = RedashConfigAPI(self._api_client)
|
|
262
269
|
self._registered_models = RegisteredModelsAPI(self._api_client)
|
|
263
270
|
self._repos = ReposAPI(self._api_client)
|
|
264
271
|
self._resource_quotas = ResourceQuotasAPI(self._api_client)
|
|
@@ -503,6 +510,11 @@ class WorkspaceClient:
|
|
|
503
510
|
"""These APIs provide specific management operations for Lakeview dashboards."""
|
|
504
511
|
return self._lakeview
|
|
505
512
|
|
|
513
|
+
@property
|
|
514
|
+
def lakeview_embedded(self) -> LakeviewEmbeddedAPI:
|
|
515
|
+
"""Token-based Lakeview APIs for embedding dashboards in external applications."""
|
|
516
|
+
return self._lakeview_embedded
|
|
517
|
+
|
|
506
518
|
@property
|
|
507
519
|
def libraries(self) -> LibrariesAPI:
|
|
508
520
|
"""The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster."""
|
|
@@ -618,6 +630,11 @@ class WorkspaceClient:
|
|
|
618
630
|
"""These endpoints are used for CRUD operations on query definitions."""
|
|
619
631
|
return self._queries_legacy
|
|
620
632
|
|
|
633
|
+
@property
|
|
634
|
+
def query_execution(self) -> QueryExecutionAPI:
|
|
635
|
+
"""Query execution APIs for AI / BI Dashboards."""
|
|
636
|
+
return self._query_execution
|
|
637
|
+
|
|
621
638
|
@property
|
|
622
639
|
def query_history(self) -> QueryHistoryAPI:
|
|
623
640
|
"""A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
|
|
@@ -643,6 +660,11 @@ class WorkspaceClient:
|
|
|
643
660
|
"""A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares."""
|
|
644
661
|
return self._recipients
|
|
645
662
|
|
|
663
|
+
@property
|
|
664
|
+
def redash_config(self) -> RedashConfigAPI:
|
|
665
|
+
"""Redash V2 service for workspace configurations (internal)."""
|
|
666
|
+
return self._redash_config
|
|
667
|
+
|
|
646
668
|
@property
|
|
647
669
|
def registered_models(self) -> RegisteredModelsAPI:
|
|
648
670
|
"""Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
|
|
@@ -845,6 +867,7 @@ class AccountClient:
|
|
|
845
867
|
self._api_client = client.ApiClient(self._config)
|
|
846
868
|
self._access_control = AccountAccessControlAPI(self._api_client)
|
|
847
869
|
self._billable_usage = BillableUsageAPI(self._api_client)
|
|
870
|
+
self._budget_policy = BudgetPolicyAPI(self._api_client)
|
|
848
871
|
self._credentials = CredentialsAPI(self._api_client)
|
|
849
872
|
self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
|
|
850
873
|
self._encryption_keys = EncryptionKeysAPI(self._api_client)
|
|
@@ -890,6 +913,11 @@ class AccountClient:
|
|
|
890
913
|
"""This API allows you to download billable usage logs for the specified account and date range."""
|
|
891
914
|
return self._billable_usage
|
|
892
915
|
|
|
916
|
+
@property
|
|
917
|
+
def budget_policy(self) -> BudgetPolicyAPI:
|
|
918
|
+
"""A service serves REST API about Budget policies."""
|
|
919
|
+
return self._budget_policy
|
|
920
|
+
|
|
893
921
|
@property
|
|
894
922
|
def credentials(self) -> CredentialsAPI:
|
|
895
923
|
"""These APIs manage credential configurations for this workspace."""
|
databricks/sdk/_base_client.py
CHANGED
|
@@ -159,16 +159,29 @@ class _BaseClient:
|
|
|
159
159
|
if isinstance(data, (str, bytes)):
|
|
160
160
|
data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
|
|
161
161
|
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
# re-read already read data from the body.
|
|
165
|
-
if data is not None and not self._is_seekable_stream(data):
|
|
166
|
-
logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
|
|
167
|
-
call = self._perform
|
|
168
|
-
else:
|
|
162
|
+
if not data:
|
|
163
|
+
# The request is not a stream.
|
|
169
164
|
call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
|
|
170
165
|
is_retryable=self._is_retryable,
|
|
171
166
|
clock=self._clock)(self._perform)
|
|
167
|
+
elif self._is_seekable_stream(data):
|
|
168
|
+
# Keep track of the initial position of the stream so that we can rewind to it
|
|
169
|
+
# if we need to retry the request.
|
|
170
|
+
initial_data_position = data.tell()
|
|
171
|
+
|
|
172
|
+
def rewind():
|
|
173
|
+
logger.debug(f"Rewinding input data to offset {initial_data_position} before retry")
|
|
174
|
+
data.seek(initial_data_position)
|
|
175
|
+
|
|
176
|
+
call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
|
|
177
|
+
is_retryable=self._is_retryable,
|
|
178
|
+
clock=self._clock,
|
|
179
|
+
before_retry=rewind)(self._perform)
|
|
180
|
+
else:
|
|
181
|
+
# Do not retry if the stream is not seekable. This is necessary to avoid bugs
|
|
182
|
+
# where the retry doesn't re-read already read data from the stream.
|
|
183
|
+
logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
|
|
184
|
+
call = self._perform
|
|
172
185
|
|
|
173
186
|
response = call(method,
|
|
174
187
|
url,
|
|
@@ -249,12 +262,6 @@ class _BaseClient:
|
|
|
249
262
|
files=None,
|
|
250
263
|
data=None,
|
|
251
264
|
auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
|
|
252
|
-
# Keep track of the initial position of the stream so that we can rewind it if
|
|
253
|
-
# we need to retry the request.
|
|
254
|
-
initial_data_position = 0
|
|
255
|
-
if self._is_seekable_stream(data):
|
|
256
|
-
initial_data_position = data.tell()
|
|
257
|
-
|
|
258
265
|
response = self._session.request(method,
|
|
259
266
|
url,
|
|
260
267
|
params=self._fix_query_string(query),
|
|
@@ -266,16 +273,8 @@ class _BaseClient:
|
|
|
266
273
|
stream=raw,
|
|
267
274
|
timeout=self._http_timeout_seconds)
|
|
268
275
|
self._record_request_log(response, raw=raw or data is not None or files is not None)
|
|
269
|
-
|
|
270
276
|
error = self._error_parser.get_api_error(response)
|
|
271
277
|
if error is not None:
|
|
272
|
-
# If the request body is a seekable stream, rewind it so that it is ready
|
|
273
|
-
# to be read again in case of a retry.
|
|
274
|
-
#
|
|
275
|
-
# TODO: This should be moved into a "before-retry" hook to avoid one
|
|
276
|
-
# unnecessary seek on the last failed retry before aborting.
|
|
277
|
-
if self._is_seekable_stream(data):
|
|
278
|
-
data.seek(initial_data_position)
|
|
279
278
|
raise error from None
|
|
280
279
|
|
|
281
280
|
return response
|
|
@@ -676,12 +676,18 @@ class MetadataServiceTokenSource(Refreshable):
|
|
|
676
676
|
self.host = cfg.host
|
|
677
677
|
|
|
678
678
|
def refresh(self) -> Token:
|
|
679
|
-
resp = requests.get(
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
679
|
+
resp = requests.get(
|
|
680
|
+
self.url,
|
|
681
|
+
timeout=self._metadata_service_timeout,
|
|
682
|
+
headers={
|
|
683
|
+
self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
|
|
684
|
+
self.METADATA_SERVICE_HOST_HEADER: self.host
|
|
685
|
+
},
|
|
686
|
+
proxies={
|
|
687
|
+
# Explicitly exclude localhost from being proxied. This is necessary
|
|
688
|
+
# for Metadata URLs which typically point to localhost.
|
|
689
|
+
"no_proxy": "localhost,127.0.0.1"
|
|
690
|
+
})
|
|
685
691
|
json_resp: dict[str, Union[str, float]] = resp.json()
|
|
686
692
|
access_token = json_resp.get("access_token", None)
|
|
687
693
|
if access_token is None:
|
|
@@ -1,8 +1,9 @@
|
|
|
1
1
|
import json as js
|
|
2
2
|
from typing import Dict, Optional
|
|
3
3
|
|
|
4
|
+
from requests import Response
|
|
5
|
+
|
|
4
6
|
from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
|
|
5
|
-
ExternalFunctionResponse,
|
|
6
7
|
ServingEndpointsAPI)
|
|
7
8
|
|
|
8
9
|
|
|
@@ -63,7 +64,7 @@ class ServingEndpointsExt(ServingEndpointsAPI):
|
|
|
63
64
|
*,
|
|
64
65
|
headers: Optional[Dict[str, str]] = None,
|
|
65
66
|
json: Optional[Dict[str, str]] = None,
|
|
66
|
-
params: Optional[Dict[str, str]] = None) ->
|
|
67
|
+
params: Optional[Dict[str, str]] = None) -> Response:
|
|
67
68
|
"""Make external services call using the credentials stored in UC Connection.
|
|
68
69
|
**NOTE:** Experimental: This API may change or be removed in a future release without warning.
|
|
69
70
|
:param conn: str
|
|
@@ -79,13 +80,27 @@ class ServingEndpointsExt(ServingEndpointsAPI):
|
|
|
79
80
|
JSON payload for the request.
|
|
80
81
|
:param params: Dict[str,str] (optional)
|
|
81
82
|
Query parameters for the request.
|
|
82
|
-
:returns: :class:`
|
|
83
|
+
:returns: :class:`Response`
|
|
83
84
|
"""
|
|
85
|
+
response = Response()
|
|
86
|
+
response.status_code = 200
|
|
87
|
+
server_response = super().http_request(connection_name=conn,
|
|
88
|
+
method=method,
|
|
89
|
+
path=path,
|
|
90
|
+
headers=js.dumps(headers) if headers is not None else None,
|
|
91
|
+
json=js.dumps(json) if json is not None else None,
|
|
92
|
+
params=js.dumps(params) if params is not None else None)
|
|
93
|
+
|
|
94
|
+
# Read the content from the HttpRequestResponse object
|
|
95
|
+
if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
|
|
96
|
+
raw_content = server_response.contents.read() # Read the bytes
|
|
97
|
+
else:
|
|
98
|
+
raise ValueError("Invalid response from the server.")
|
|
99
|
+
|
|
100
|
+
# Set the raw content
|
|
101
|
+
if isinstance(raw_content, bytes):
|
|
102
|
+
response._content = raw_content
|
|
103
|
+
else:
|
|
104
|
+
raise ValueError("Contents must be bytes.")
|
|
84
105
|
|
|
85
|
-
return
|
|
86
|
-
method=method,
|
|
87
|
-
path=path,
|
|
88
|
-
headers=js.dumps(headers),
|
|
89
|
-
json=js.dumps(json),
|
|
90
|
-
params=js.dumps(params),
|
|
91
|
-
)
|
|
106
|
+
return response
|
databricks/sdk/retries.py
CHANGED
|
@@ -13,7 +13,8 @@ def retried(*,
|
|
|
13
13
|
on: Sequence[Type[BaseException]] = None,
|
|
14
14
|
is_retryable: Callable[[BaseException], Optional[str]] = None,
|
|
15
15
|
timeout=timedelta(minutes=20),
|
|
16
|
-
clock: Clock = None
|
|
16
|
+
clock: Clock = None,
|
|
17
|
+
before_retry: Callable = None):
|
|
17
18
|
has_allowlist = on is not None
|
|
18
19
|
has_callback = is_retryable is not None
|
|
19
20
|
if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
|
|
@@ -54,6 +55,9 @@ def retried(*,
|
|
|
54
55
|
raise err
|
|
55
56
|
|
|
56
57
|
logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
|
|
58
|
+
if before_retry:
|
|
59
|
+
before_retry()
|
|
60
|
+
|
|
57
61
|
clock.sleep(sleep + random())
|
|
58
62
|
attempt += 1
|
|
59
63
|
raise TimeoutError(f'Timed out after {timeout}') from last_err
|