databricks-sdk 0.59.0__tar.gz → 0.60.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk might be problematic.
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/CHANGELOG.md +35 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/PKG-INFO +1 -1
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/__init__.py +5 -5
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/credentials_provider.py +2 -2
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/mixins/files.py +43 -15
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/mixins/open_ai_client.py +28 -7
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/oidc.py +6 -2
- databricks_sdk-0.59.0/databricks/sdk/service/aibuilder.py → databricks_sdk-0.60.0/databricks/sdk/service/agentbricks.py +5 -5
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/catalog.py +1 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/cleanrooms.py +24 -24
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/database.py +16 -1
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/pipelines.py +2 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/settings.py +1 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/sharing.py +2 -1
- databricks_sdk-0.60.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks_sdk.egg-info/PKG-INFO +1 -1
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks_sdk.egg-info/SOURCES.txt +1 -1
- databricks_sdk-0.59.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/DCO +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/LICENSE +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/Makefile +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/NOTICE +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/README.md +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/SECURITY.md +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/_base_client.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/oidc_token_supplier.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/apps.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/billing.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/compute.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/dashboards.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/files.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/iam.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/jobs.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/ml.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/oauth2.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/qualitymonitorv2.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/serving.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/sql.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks_sdk.egg-info/requires.txt +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/pyproject.toml +0 -0
- {databricks_sdk-0.59.0 → databricks_sdk-0.60.0}/setup.cfg +0 -0
CHANGELOG.md
@@ -1,5 +1,40 @@
 # Version changelog
 
+## Release v0.60.0
+
+### New Features and Improvements
+
+* Added headers to `HttpRequestResponse` in the OpenAI client.
+
+### Bug Fixes
+
+- Fix an issue in the OIDC implementation that prevented the use of the feature (see #994).
+- Fix a reported issue where `FilesExt` fails to retry when it receives certain status codes from the server.
+
+### Internal Changes
+
+- Refactor unit tests for `FilesExt` to improve their readability.
+
+### API Changes
+* Added `databricks.sdk.service.agentbricks` package.
+* Added `provisioning_phase` field for `databricks.sdk.service.database.SyncedTablePipelineProgress`.
+* Added `redshift` and `sqldw` enum values for `databricks.sdk.service.pipelines.IngestionSourceType`.
+* Added `germany_c5` enum value for `databricks.sdk.service.settings.ComplianceStandard`.
+* Changed `asset_type` and `name` fields for `databricks.sdk.service.cleanrooms.CleanRoomAsset` to be required.
+* [Breaking] Changed `asset_type` and `name` fields for `databricks.sdk.service.cleanrooms.CleanRoomAsset` to be required.
+* [Breaking] Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetForeignTableLocalDetails` to be required.
+* Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetForeignTableLocalDetails` to be required.
+* Changed `notebook_content` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook` to be required.
+* [Breaking] Changed `notebook_content` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook` to be required.
+* Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetTableLocalDetails` to be required.
+* [Breaking] Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetTableLocalDetails` to be required.
+* [Breaking] Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetViewLocalDetails` to be required.
+* Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetViewLocalDetails` to be required.
+* [Breaking] Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetVolumeLocalDetails` to be required.
+* Changed `local_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAssetVolumeLocalDetails` to be required.
+* [Breaking] Removed `databricks.sdk.service.aibuilder` package.
+
+
 ## Release v0.59.0
 
 ### API Changes
databricks/sdk/__init__.py
@@ -13,7 +13,7 @@ from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
-from databricks.sdk.service import aibuilder as pkg_aibuilder
+from databricks.sdk.service import agentbricks as pkg_agentbricks
 from databricks.sdk.service import apps as pkg_apps
 from databricks.sdk.service import billing as pkg_billing
 from databricks.sdk.service import catalog as pkg_catalog
@@ -36,7 +36,7 @@ from databricks.sdk.service import sharing as pkg_sharing
 from databricks.sdk.service import sql as pkg_sql
 from databricks.sdk.service import vectorsearch as pkg_vectorsearch
 from databricks.sdk.service import workspace as pkg_workspace
-from databricks.sdk.service.aibuilder import AiBuilderAPI
+from databricks.sdk.service.agentbricks import AgentBricksAPI
 from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
                                             BudgetsAPI, LogDeliveryAPI,
@@ -240,7 +240,7 @@ class WorkspaceClient:
         serving_endpoints = ServingEndpointsExt(self._api_client)
         self._access_control = pkg_iam.AccessControlAPI(self._api_client)
         self._account_access_control_proxy = pkg_iam.AccountAccessControlProxyAPI(self._api_client)
-        self._ai_builder = pkg_aibuilder.AiBuilderAPI(self._api_client)
+        self._agent_bricks = pkg_agentbricks.AgentBricksAPI(self._api_client)
         self._alerts = pkg_sql.AlertsAPI(self._api_client)
         self._alerts_legacy = pkg_sql.AlertsLegacyAPI(self._api_client)
         self._alerts_v2 = pkg_sql.AlertsV2API(self._api_client)
@@ -377,9 +377,9 @@ class WorkspaceClient:
         return self._account_access_control_proxy
 
     @property
-    def ai_builder(self) -> pkg_aibuilder.AiBuilderAPI:
+    def agent_bricks(self) -> pkg_agentbricks.AgentBricksAPI:
         """The Custom LLMs service manages state and powers the UI for the Custom LLM product."""
-        return self._ai_builder
+        return self._agent_bricks
 
     @property
     def alerts(self) -> pkg_sql.AlertsAPI:
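The `aibuilder` → `agentbricks` rename is a breaking change for callers. A minimal migration sketch (the LLM id is a hypothetical placeholder; `get_custom_llm` appears in the service diff further down):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# 0.59.0: llm = w.ai_builder.get_custom_llm(id="...")
# 0.60.0: the same service is exposed as `agent_bricks`.
llm = w.agent_bricks.get_custom_llm(id="my-custom-llm-id")  # hypothetical id
print(llm.instructions)
```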
databricks/sdk/credentials_provider.py
@@ -331,7 +331,7 @@ def file_oidc(cfg) -> Optional[CredentialsProvider]:
 # that provides a Databricks token from an IdTokenSource.
 def _oidc_credentials_provider(cfg, id_token_source: oidc.IdTokenSource) -> Optional[CredentialsProvider]:
     try:
-
+        id_token_source.id_token()  # validate the id_token_source
     except Exception as e:
         logger.debug(f"Failed to get OIDC token: {e}")
         return None
@@ -341,7 +341,7 @@ def _oidc_credentials_provider(cfg, id_token_source: oidc.IdTokenSource) -> Opti
         token_endpoint=cfg.oidc_endpoints.token_endpoint,
         client_id=cfg.client_id,
         account_id=cfg.account_id,
-
+        id_token_source=id_token_source,
         disable_async=cfg.disable_async_token_refresh,
     )
databricks/sdk/mixins/files.py
@@ -28,7 +28,6 @@ from .._base_client import _BaseClient, _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..config import Config
 from ..errors import AlreadyExists, NotFound
-from ..errors.customizer import _RetryAfterCustomizer
 from ..errors.mapper import _error_mapper
 from ..retries import retried
 from ..service import files
@@ -577,6 +576,27 @@ class _DbfsPath(_Path):
         return f"<_DbfsPath {self._path}>"
 
 
+class _RetryableException(Exception):
+    """Base class for retryable exceptions in DBFS operations."""
+
+    def __init__(self, message: str, http_status_code: int):
+        super().__init__()
+        self.message = message
+        self.http_status_code = http_status_code
+
+    def __str__(self) -> str:
+        return f"{self.message} (HTTP Status: {self.http_status_code})"
+
+    @staticmethod
+    def make_error(response: requests.Response) -> "_RetryableException":
+        """Map the response to a retryable exception."""
+
+        return _RetryableException(
+            message=response.text,
+            http_status_code=response.status_code,
+        )
+
+
 class DbfsExt(files.DbfsAPI):
     __doc__ = files.DbfsAPI.__doc__
@@ -885,7 +905,7 @@ class FilesExt(files.FilesAPI):
                 timeout=self._config.multipart_upload_single_chunk_upload_timeout_seconds,
             )
 
-            upload_response = self.
+            upload_response = self._retry_cloud_idempotent_operation(perform, rewind)
 
             if upload_response.status_code in (200, 201):
                 # Chunk upload successful
@@ -1097,7 +1117,7 @@ class FilesExt(files.FilesAPI):
         )
 
         try:
-            return self.
+            return self._retry_cloud_idempotent_operation(perform)
         except RequestException:
             _LOG.warning("Failed to retrieve upload status")
             return None
@@ -1116,7 +1136,7 @@ class FilesExt(files.FilesAPI):
         # a 503 or 500 response, then you need to resume the interrupted upload from where it left off.
 
         # Let's follow that for all potentially retryable status codes.
-        # Together with the catch block below we replicate the logic in
+        # Together with the catch block below we replicate the logic in _retry_databricks_idempotent_operation().
         if upload_response.status_code in self._RETRYABLE_STATUS_CODES:
             if retry_count < self._config.multipart_upload_max_retries:
                 retry_count += 1
@@ -1243,7 +1263,7 @@ class FilesExt(files.FilesAPI):
             timeout=self._config.multipart_upload_single_chunk_upload_timeout_seconds,
         )
 
-        abort_response = self.
+        abort_response = self._retry_cloud_idempotent_operation(perform)
 
         if abort_response.status_code not in (200, 201):
             raise ValueError(abort_response)
@@ -1265,7 +1285,7 @@ class FilesExt(files.FilesAPI):
             timeout=self._config.multipart_upload_single_chunk_upload_timeout_seconds,
         )
 
-        abort_response = self.
+        abort_response = self._retry_cloud_idempotent_operation(perform)
 
         if abort_response.status_code not in (200, 201):
             raise ValueError(abort_response)
@@ -1283,23 +1303,31 @@ class FilesExt(files.FilesAPI):
         session.mount("http://", http_adapter)
         return session
 
-    def
+    def _retry_cloud_idempotent_operation(
         self, operation: Callable[[], requests.Response], before_retry: Callable = None
     ) -> requests.Response:
-        """Perform given idempotent operation with necessary retries
-
+        """Perform given idempotent operation with necessary retries for requests to non Databricks APIs.
+        For cloud APIs, we will retry on network errors and on server response codes.
+        Since operation is idempotent it's safe to retry it for response codes where server state might have changed.
         """
 
-        def delegate():
+        def delegate() -> requests.Response:
             response = operation()
             if response.status_code in self._RETRYABLE_STATUS_CODES:
-                attrs = {}
-                # this will assign "retry_after_secs" to the attrs, essentially making exception look retryable
-                _RetryAfterCustomizer().customize_error(response, attrs)
-                raise _error_mapper(response, attrs)
+                raise _RetryableException.make_error(response)
             else:
                 return response
 
+        def extended_is_retryable(e: BaseException) -> Optional[str]:
+            retry_reason_from_base = _BaseClient._is_retryable(e)
+            if retry_reason_from_base is not None:
+                return retry_reason_from_base
+
+            if isinstance(e, _RetryableException):
+                # this is a retriable exception, but not a network error
+                return f"retryable exception (status_code:{e.http_status_code})"
+            return None
+
         # following _BaseClient timeout
         retry_timeout_seconds = self._config.retry_timeout_seconds or 300
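The refactoring above swaps the error-mapper machinery for a dedicated `_RetryableException`, so retryable HTTP statuses and network errors flow through the same retry classifier. A self-contained sketch of the same pattern, with illustrative names and an assumed status-code set (not the SDK's internals):

```python
from typing import Callable, Optional

import requests

RETRYABLE_STATUS_CODES = (429, 500, 502, 503, 504)  # assumption: illustrative set


class RetryableHTTPError(Exception):
    """Raised for status codes that are safe to retry on an idempotent call."""

    def __init__(self, status_code: int):
        super().__init__(f"retryable HTTP status {status_code}")
        self.status_code = status_code


def retry_idempotent(operation: Callable[[], requests.Response], max_attempts: int = 3) -> requests.Response:
    """Toy version of the pattern: surface retryable statuses as exceptions so one
    except-clause handles them together with network errors."""
    last_exc: Optional[BaseException] = None
    for _ in range(max_attempts):
        try:
            response = operation()
            if response.status_code in RETRYABLE_STATUS_CODES:
                raise RetryableHTTPError(response.status_code)
            return response
        except (RetryableHTTPError, requests.ConnectionError, requests.Timeout) as e:
            last_exc = e
    raise last_exc
```

The SDK itself delegates the loop to `retried(timeout=..., is_retryable=..., before_retry=...)` as the hunk shows; the sketch inlines it for readability.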
@@ -1307,7 +1335,7 @@ class FilesExt(files.FilesAPI):
             timeout=timedelta(seconds=retry_timeout_seconds),
             # also retry on network errors (connection error, connection timeout)
             # where we believe request didn't reach the server
-            is_retryable=
+            is_retryable=extended_is_retryable,
             before_retry=before_retry,
         )(delegate)()
databricks/sdk/mixins/open_ai_client.py
@@ -4,6 +4,7 @@ from typing import Dict, Optional
 from requests import Response
 
 from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            HttpRequestResponse,
                                             ServingEndpointsAPI)
 
 
@@ -88,15 +89,30 @@ class ServingEndpointsExt(ServingEndpointsAPI):
         """
         response = Response()
         response.status_code = 200
-        server_response = super().http_request(
-            connection_name=conn,
-            method=method,
-            path=path,
-            headers=headers,
-            json=json,
-            params=params,
+
+        # We currently don't call super.http_request because we need to pass in response_headers
+        # This is a temporary fix to get the headers we need for the MCP session id
+        # TODO: Remove this once we have a better way to get back the response headers
+        headers_to_capture = ["mcp-session-id"]
+        res = self._api.do(
+            "POST",
+            "/api/2.0/external-function",
+            body={
+                "connection_name": conn,
+                "method": method.value,
+                "path": path,
+                "headers": js.dumps(headers) if headers is not None else None,
+                "json": js.dumps(json) if json is not None else None,
+                "params": js.dumps(params) if params is not None else None,
+            },
+            headers={"Accept": "text/plain", "Content-Type": "application/json"},
+            raw=True,
+            response_headers=headers_to_capture,
         )
 
+        # Create HttpRequestResponse from the raw response
+        server_response = HttpRequestResponse.from_dict(res)
+
         # Read the content from the HttpRequestResponse object
         if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
             raw_content = server_response.contents.read()  # Read the bytes
@@ -109,4 +125,9 @@ class ServingEndpointsExt(ServingEndpointsAPI):
         else:
             raise ValueError("Contents must be bytes.")
 
+        # Copy headers from raw response to Response
+        for header_name in headers_to_capture:
+            if header_name in res:
+                response.headers[header_name] = res[header_name]
+
         return response
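With the header capture in place, callers of the mixin's `http_request` can read the MCP session id straight off the returned `requests.Response`. A sketch, assuming a hypothetical UC connection name and path (the keyword names follow the variables used in the hunk above):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()

resp = w.serving_endpoints.http_request(
    conn="my_mcp_connection",  # hypothetical connection name
    method=ExternalFunctionRequestHttpMethod.POST,
    path="/mcp",  # hypothetical path
    json={"jsonrpc": "2.0", "method": "initialize", "id": 1},
)
session_id = resp.headers.get("mcp-session-id")  # now populated by the mixin
```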
databricks/sdk/oidc.py
@@ -188,14 +188,18 @@ class DatabricksOidcTokenSource(oauth.TokenSource):
         logger.debug("Client ID provided, authenticating with Workload Identity Federation")
 
         id_token = self._id_token_source.id_token()
+        return self._exchange_id_token(id_token)
 
+    # This function is used to create the OAuth client.
+    # It exists to make it easier to test.
+    def _exchange_id_token(self, id_token: IdToken) -> oauth.Token:
         client = oauth.ClientCredentials(
             client_id=self._client_id,
-            client_secret="",  #
+            client_secret="",  # there is no (rotatable) secrets in the OIDC flow
             token_url=self._token_endpoint,
             endpoint_params={
                 "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
-                "subject_token": id_token,
+                "subject_token": id_token.jwt,
                 "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
             },
             scopes=["all-apis"],
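`_exchange_id_token` drives a standard RFC 8693 token exchange: the JWT from the `IdTokenSource` becomes the `subject_token`, and no client secret is sent. A minimal sketch of the equivalent raw request (endpoint URL and JWT are placeholders, and the exact wire format of the SDK's OAuth client is an assumption):

```python
import requests


def exchange_id_token(token_endpoint: str, client_id: str, jwt: str) -> dict:
    # Form-encoded token-exchange request (RFC 8693), as configured above.
    resp = requests.post(
        token_endpoint,
        data={
            "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
            "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
            "subject_token": jwt,
            "client_id": client_id,
            "scope": "all-apis",
        },
    )
    resp.raise_for_status()
    return resp.json()  # access_token, token_type, expires_in, ...
```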
databricks/sdk/service/aibuilder.py → databricks/sdk/service/agentbricks.py
@@ -23,9 +23,6 @@ class CustomLlm:
     instructions: str
     """Instructions for the custom LLM to follow"""
 
-    optimization_state: State
-    """If optimization is kicked off, tracks the state of the custom LLM"""
-
     agent_artifact_path: Optional[str] = None
 
     creation_time: Optional[str] = None
@@ -45,6 +42,9 @@ class CustomLlm:
 
     id: Optional[str] = None
 
+    optimization_state: Optional[State] = None
+    """If optimization is kicked off, tracks the state of the custom LLM"""
+
     def as_dict(self) -> dict:
         """Serializes the CustomLlm into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -190,7 +190,7 @@ class Table:
     )
 
 
-class AiBuilderAPI:
+class AgentBricksAPI:
     """The Custom LLMs service manages state and powers the UI for the Custom LLM product."""
 
     def __init__(self, api_client):
@@ -270,7 +270,7 @@ class AiBuilderAPI:
             "Accept": "application/json",
         }
 
-        self._api.do("DELETE", f"/api/2.0/custom-
+        self._api.do("DELETE", f"/api/2.0/custom-llms/{id}", headers=headers)
 
     def get_custom_llm(self, id: str) -> CustomLlm:
         """Get a Custom LLM.
databricks/sdk/service/cleanrooms.py
@@ -132,15 +132,24 @@ class CleanRoomAccessRestricted(Enum):
 class CleanRoomAsset:
     """Metadata of the clean room asset"""
 
-    added_at: Optional[int] = None
-    """When the asset is added to the clean room, in epoch milliseconds."""
+    name: str
+    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
+    the name displayed in the clean room UI.
+
+    For UC securable assets (tables, volumes, etc.), the format is
+    *shared_catalog*.*shared_schema*.*asset_name*
+
+    For notebooks, the name is the notebook file name."""
 
-    asset_type: Optional[CleanRoomAssetAssetType] = None
+    asset_type: CleanRoomAssetAssetType
     """The type of the asset."""
 
+    added_at: Optional[int] = None
+    """When the asset is added to the clean room, in epoch milliseconds."""
+
     clean_room_name: Optional[str] = None
-    """The name of the clean room this asset belongs to. This is
-
+    """The name of the clean room this asset belongs to. This field is required for create operations
+    and populated by the server for responses."""
 
     foreign_table: Optional[CleanRoomAssetForeignTable] = None
     """Foreign table details available to all collaborators of the clean room. Present if and only if
@@ -150,15 +159,6 @@ class CleanRoomAsset:
     """Local details for a foreign that are only available to its owner. Present if and only if
     **asset_type** is **FOREIGN_TABLE**"""
 
-    name: Optional[str] = None
-    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
-    the name displayed in the clean room UI.
-
-    For UC securable assets (tables, volumes, etc.), the format is
-    *shared_catalog*.*shared_schema*.*asset_name*
-
-    For notebooks, the name is the notebook file name."""
-
     notebook: Optional[CleanRoomAssetNotebook] = None
     """Notebook details available to all collaborators of the clean room. Present if and only if
     **asset_type** is **NOTEBOOK_FILE**"""
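Because `name` and `asset_type` no longer default to `None`, constructing an asset now requires them up front. A sketch using the **NOTEBOOK_FILE** asset type named in the docstrings above (the enum member name is assumed from those docstrings; the notebook content is a placeholder):

```python
import base64

from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
                                               CleanRoomAssetAssetType,
                                               CleanRoomAssetNotebook)

html_export = b"<html>...</html>"  # placeholder for :method:workspace/export HTML output
asset = CleanRoomAsset(
    name="analysis_notebook",  # for notebooks, the notebook file name
    asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,
    notebook=CleanRoomAssetNotebook(
        notebook_content=base64.b64encode(html_export).decode(),
    ),
)
```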
@@ -314,7 +314,7 @@ class CleanRoomAssetForeignTable:
 
 @dataclass
 class CleanRoomAssetForeignTableLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the foreign table in its owner's local metastore, in the format of
     *catalog*.*schema*.*foreign_table_name*"""
@@ -340,13 +340,13 @@ class CleanRoomAssetForeignTableLocalDetails:
 
 @dataclass
 class CleanRoomAssetNotebook:
-    etag: Optional[str] = None
-    """Server generated etag that represents the notebook version."""
-
-    notebook_content: Optional[str] = None
+    notebook_content: str
     """Base 64 representation of the notebook contents. This is the same format as returned by
     :method:workspace/export with the format of **HTML**."""
 
+    etag: Optional[str] = None
+    """Server generated etag that represents the notebook version."""
+
     review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
     """top-level status derived from all reviews"""
@@ -432,7 +432,7 @@ class CleanRoomAssetTable:
 
 @dataclass
 class CleanRoomAssetTableLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the table in its owner's local metastore, in the format of
     *catalog*.*schema*.*table_name*"""
@@ -490,7 +490,7 @@ class CleanRoomAssetView:
 
 @dataclass
 class CleanRoomAssetViewLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the view in its owner's local metastore, in the format of
     *catalog*.*schema*.*view_name*"""
@@ -516,7 +516,7 @@ class CleanRoomAssetViewLocalDetails:
 
 @dataclass
 class CleanRoomAssetVolumeLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the volume in its owner's local metastore, in the format of
     *catalog*.*schema*.*volume_name*"""
@@ -1178,8 +1178,8 @@ class CleanRoomAssetsAPI:
         access the asset. Typically, you should use a group as the clean room owner.
 
         :param clean_room_name: str
-          The name of the clean room this asset belongs to. This is
-
+          The name of the clean room this asset belongs to. This field is required for create operations and
+          populated by the server for responses.
         :param asset: :class:`CleanRoomAsset`
 
         :returns: :class:`CleanRoomAsset`
databricks/sdk/service/database.py
@@ -707,6 +707,13 @@ class ProvisioningInfoState(Enum):
     UPDATING = "UPDATING"
 
 
+class ProvisioningPhase(Enum):
+
+    PROVISIONING_PHASE_INDEX_SCAN = "PROVISIONING_PHASE_INDEX_SCAN"
+    PROVISIONING_PHASE_INDEX_SORT = "PROVISIONING_PHASE_INDEX_SORT"
+    PROVISIONING_PHASE_MAIN = "PROVISIONING_PHASE_MAIN"
+
+
 @dataclass
 class RequestedClaims:
     permission_set: Optional[RequestedClaimsPermissionSet] = None
@@ -960,6 +967,9 @@ class SyncedTablePipelineProgress:
     """The source table Delta version that was last processed by the pipeline. The pipeline may not
     have completely processed this version yet."""
 
+    provisioning_phase: Optional[ProvisioningPhase] = None
+    """The current phase of the data synchronization pipeline."""
+
     sync_progress_completion: Optional[float] = None
     """The completion ratio of this update. This is a number between 0 and 1."""
@@ -976,6 +986,8 @@ class SyncedTablePipelineProgress:
             body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
         if self.latest_version_currently_processing is not None:
             body["latest_version_currently_processing"] = self.latest_version_currently_processing
+        if self.provisioning_phase is not None:
+            body["provisioning_phase"] = self.provisioning_phase.value
         if self.sync_progress_completion is not None:
             body["sync_progress_completion"] = self.sync_progress_completion
         if self.synced_row_count is not None:
@@ -991,6 +1003,8 @@ class SyncedTablePipelineProgress:
             body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
         if self.latest_version_currently_processing is not None:
             body["latest_version_currently_processing"] = self.latest_version_currently_processing
+        if self.provisioning_phase is not None:
+            body["provisioning_phase"] = self.provisioning_phase
         if self.sync_progress_completion is not None:
             body["sync_progress_completion"] = self.sync_progress_completion
         if self.synced_row_count is not None:
@@ -1005,6 +1019,7 @@ class SyncedTablePipelineProgress:
         return cls(
             estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None),
             latest_version_currently_processing=d.get("latest_version_currently_processing", None),
+            provisioning_phase=_enum(d, "provisioning_phase", ProvisioningPhase),
            sync_progress_completion=d.get("sync_progress_completion", None),
             synced_row_count=d.get("synced_row_count", None),
             total_row_count=d.get("total_row_count", None),
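Round-tripping the new field exercises the serialization rules above: `as_dict` stores the enum's `.value`, and `from_dict` re-hydrates it via `_enum`. A small sketch:

```python
from databricks.sdk.service.database import (ProvisioningPhase,
                                             SyncedTablePipelineProgress)

progress = SyncedTablePipelineProgress.from_dict({
    "provisioning_phase": "PROVISIONING_PHASE_INDEX_SCAN",
    "sync_progress_completion": 0.42,
})
assert progress.provisioning_phase is ProvisioningPhase.PROVISIONING_PHASE_INDEX_SCAN
assert progress.as_dict()["provisioning_phase"] == "PROVISIONING_PHASE_INDEX_SCAN"
```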
@@ -1735,7 +1750,7 @@ class DatabaseAPI:
           The name of the instance. This is the unique identifier for the instance.
         :param database_instance: :class:`DatabaseInstance`
         :param update_mask: str
-          The list of fields to update.
+          The list of fields to update. This field is not yet supported, and is ignored by the server.
 
         :returns: :class:`DatabaseInstance`
         """
databricks/sdk/service/pipelines.py
@@ -734,9 +734,11 @@ class IngestionSourceType(Enum):
     NETSUITE = "NETSUITE"
     ORACLE = "ORACLE"
     POSTGRESQL = "POSTGRESQL"
+    REDSHIFT = "REDSHIFT"
     SALESFORCE = "SALESFORCE"
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
+    SQLDW = "SQLDW"
     SQLSERVER = "SQLSERVER"
     TERADATA = "TERADATA"
     WORKDAY_RAAS = "WORKDAY_RAAS"
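The two new members are plain string-valued enum entries, so existing code can reference them like any other source type:

```python
from databricks.sdk.service.pipelines import IngestionSourceType

# New in 0.60.0.
assert IngestionSourceType.REDSHIFT.value == "REDSHIFT"
assert IngestionSourceType.SQLDW.value == "SQLDW"
```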
databricks/sdk/service/sharing.py
@@ -2121,7 +2121,8 @@ class SharedDataObjectUpdate:
     """One of: **ADD**, **REMOVE**, **UPDATE**."""
 
     data_object: Optional[SharedDataObject] = None
-    """The data object that is being added, removed, or updated."""
+    """The data object that is being added, removed, or updated. The maximum number update data objects
+    allowed is a 100."""
 
     def as_dict(self) -> dict:
         """Serializes the SharedDataObjectUpdate into a dictionary suitable for use as a JSON request body."""
databricks_sdk-0.60.0/databricks/sdk/version.py (added)
@@ -0,0 +1 @@
+__version__ = "0.60.0"
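The version module stays a single attribute, so version-pinning checks keep working unchanged:

```python
from databricks.sdk.version import __version__

assert __version__ == "0.60.0"
```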
databricks_sdk.egg-info/SOURCES.txt
@@ -55,7 +55,7 @@ databricks/sdk/runtime/__init__.py
 databricks/sdk/runtime/dbutils_stub.py
 databricks/sdk/service/__init__.py
 databricks/sdk/service/_internal.py
-databricks/sdk/service/aibuilder.py
+databricks/sdk/service/agentbricks.py
 databricks/sdk/service/apps.py
 databricks/sdk/service/billing.py
 databricks/sdk/service/catalog.py
databricks_sdk-0.59.0/databricks/sdk/version.py (removed)
@@ -1 +0,0 @@
-__version__ = "0.59.0"